diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c45ceaa63..f70432267 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,15 +27,6 @@ jobs: steps: - # Get vcpython27 on Windows + Python 2.7, to build netifaces - # extension. See https://chocolatey.org/packages/vcpython27 and - # https://github.com/crazy-max/ghaction-chocolatey - - name: Install MSVC 9.0 for Python 2.7 [Windows] - if: matrix.os == 'windows-latest' && matrix.python-version == '2.7' - uses: crazy-max/ghaction-chocolatey@v1 - with: - args: install vcpython27 - # See https://github.com/actions/checkout. A fetch-depth of 0 # fetches all tags and branches. - name: Check out Tahoe-LAFS sources @@ -44,10 +35,35 @@ jobs: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} + if: ${{ matrix.os != 'windows-latest' }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} + # See note below about need for using 32-bit Python 2.7 on + # Windows. The extra handling here for Python 3.6 on Windows is + # because I could not figure out the right GitHub Actions + # expression to do this in a better way. + - name: Set up Python ${{ matrix.python-version }} [Windows x64] + if: ${{ ( matrix.os == 'windows-latest' ) && ( matrix.python-version == '3.6' ) }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x64' + + # We use netifaces, which does not ship a 64-bit wheel for the + # Python 2.7 + Windows combination, but it ships a 32-bit wheel. + # Since MS has removed vcpython27 compiler downloads from their + # usual download site, building a netifaces wheel locally is not + # an option anymore. So let us just test with 32-bit Python on + # Windows. + - name: Set up Python ${{ matrix.python-version }} [Windows x86] + if: ${{ ( matrix.os == 'windows-latest' ) && ( matrix.python-version == '2.7' ) }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x86' + # To use pip caching with GitHub Actions in an OS-independent # manner, we need `pip cache dir` command, which became # available since pip v20.1+. At the time of writing this, @@ -164,15 +180,6 @@ jobs: steps: - # Get vcpython27 for Windows + Python 2.7, to build netifaces - # extension. See https://chocolatey.org/packages/vcpython27 and - # https://github.com/crazy-max/ghaction-chocolatey - - name: Install MSVC 9.0 for Python 2.7 [Windows] - if: matrix.os == 'windows-latest' && matrix.python-version == '2.7' - uses: crazy-max/ghaction-chocolatey@v1 - with: - args: install vcpython27 - - name: Install Tor [Ubuntu] if: matrix.os == 'ubuntu-latest' run: sudo apt install tor @@ -193,10 +200,19 @@ jobs: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} + if: ${{ matrix.os != 'windows-latest' }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} + # See this step under coverage job. + - name: Set up Python ${{ matrix.python-version }} [Windows x86] + if: ${{ matrix.os == 'windows-latest' }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x86' + - name: Get pip cache directory id: pip-cache run: | @@ -242,25 +258,25 @@ jobs: steps: - # Get vcpython27 for Windows + Python 2.7, to build netifaces - # extension. 
See https://chocolatey.org/packages/vcpython27 and - # https://github.com/crazy-max/ghaction-chocolatey - - name: Install MSVC 9.0 for Python 2.7 [Windows] - if: matrix.os == 'windows-latest' && matrix.python-version == '2.7' - uses: crazy-max/ghaction-chocolatey@v1 - with: - args: install vcpython27 - - name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 with: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} + if: ${{ matrix.os != 'windows-latest' }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} + # See this step under coverage job. + - name: Set up Python ${{ matrix.python-version }} [Windows x86] + if: ${{ matrix.os == 'windows-latest' }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x86' + - name: Get pip cache directory id: pip-cache run: | diff --git a/docs/frontends/FTP-and-SFTP.rst b/docs/frontends/FTP-and-SFTP.rst index ee6371812..9d4f1dcec 100644 --- a/docs/frontends/FTP-and-SFTP.rst +++ b/docs/frontends/FTP-and-SFTP.rst @@ -7,11 +7,10 @@ Tahoe-LAFS SFTP Frontend 1. `SFTP Background`_ 2. `Tahoe-LAFS Support`_ 3. `Creating an Account File`_ -4. `Running An Account Server (accounts.url)`_ -5. `Configuring SFTP Access`_ -6. `Dependencies`_ -7. `Immutable and Mutable Files`_ -8. `Known Issues`_ +4. `Configuring SFTP Access`_ +5. `Dependencies`_ +6. `Immutable and Mutable Files`_ +7. `Known Issues`_ SFTP Background @@ -78,33 +77,6 @@ start with "ssh-". Now add an ``accounts.file`` directive to your ``tahoe.cfg`` file, as described in the next sections. -Running An Account Server (accounts.url) -======================================== - -The accounts.url directive allows access requests to be controlled by an -HTTP-based login service, useful for centralized deployments. This was used -by AllMyData to provide web-based file access, where the service used a -simple PHP script and database lookups to map an account email address and -password to a Tahoe-LAFS directory cap. The service will receive a -multipart/form-data POST, just like one created with a
and -fields, with three parameters: - -• action: "authenticate" (this is a static string) -• email: USERNAME (Tahoe-LAFS has no notion of email addresses, but the - authentication service uses them as account names, so the interface - presents this argument as "email" rather than "username"). -• passwd: PASSWORD - -It should return a single string that either contains a Tahoe-LAFS directory -cap (URI:DIR2:...), or "0" to indicate a login failure. - -Tahoe-LAFS recommends the service be secure, preferably localhost-only. This -makes it harder for attackers to brute force the password or use DNS -poisoning to cause the Tahoe-LAFS gateway to talk with the wrong server, -thereby revealing the usernames and passwords. - -Public key authentication is not supported when an account server is used. - Configuring SFTP Access ======================= diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py index 97392bf00..1f350eb8e 100644 --- a/integration/test_servers_of_happiness.py +++ b/integration/test_servers_of_happiness.py @@ -30,7 +30,7 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto proto, sys.executable, [ - sys.executable, '-m', 'allmydata.scripts.runner', + sys.executable, '-b', '-m', 'allmydata.scripts.runner', '-d', node_dir, 'put', __file__, ] diff --git a/integration/test_tor.py b/integration/test_tor.py index dcbfb1151..3b374f669 100644 --- a/integration/test_tor.py +++ b/integration/test_tor.py @@ -46,7 +46,7 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne proto, sys.executable, ( - sys.executable, '-m', 'allmydata.scripts.runner', + sys.executable, '-b', '-m', 'allmydata.scripts.runner', '-d', join(temp_dir, 'carol'), 'put', gold_path, ) @@ -60,7 +60,7 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne proto, sys.executable, ( - sys.executable, '-m', 'allmydata.scripts.runner', + sys.executable, '-b', '-m', 'allmydata.scripts.runner', '-d', join(temp_dir, 'dave'), 'get', cap, ) @@ -84,7 +84,7 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_ proto, sys.executable, ( - sys.executable, '-m', 'allmydata.scripts.runner', + sys.executable, '-b', '-m', 'allmydata.scripts.runner', 'create-node', '--nickname', name, '--introducer', introducer_furl, diff --git a/integration/util.py b/integration/util.py index 256fd68c1..b72e11c72 100644 --- a/integration/util.py +++ b/integration/util.py @@ -152,9 +152,9 @@ def _tahoe_runner_optional_coverage(proto, reactor, request, other_args): `--coverage` option if the `request` indicates we should. 
""" if request.config.getoption('coverage'): - args = [sys.executable, '-m', 'coverage', 'run', '-m', 'allmydata.scripts.runner', '--coverage'] + args = [sys.executable, '-b', '-m', 'coverage', 'run', '-m', 'allmydata.scripts.runner', '--coverage'] else: - args = [sys.executable, '-m', 'allmydata.scripts.runner'] + args = [sys.executable, '-b', '-m', 'allmydata.scripts.runner'] args += other_args return reactor.spawnProcess( proto, diff --git a/newsfragments/3619.minor b/newsfragments/3619.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3626.minor b/newsfragments/3626.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3652.removed b/newsfragments/3652.removed new file mode 100644 index 000000000..a3e964702 --- /dev/null +++ b/newsfragments/3652.removed @@ -0,0 +1 @@ +Removed support for the Account Server frontend authentication type. diff --git a/newsfragments/3672.minor b/newsfragments/3672.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3675.minor b/newsfragments/3675.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3678.minor b/newsfragments/3678.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3679.minor b/newsfragments/3679.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3681.installation b/newsfragments/3681.installation new file mode 100644 index 000000000..a697e6c60 --- /dev/null +++ b/newsfragments/3681.installation @@ -0,0 +1,3 @@ +Tahoe-LAFS CI now runs tests only on 32-bit Windows. Microsoft has +removed vcpython27 compiler downloads from their site, and Tahoe-LAFS +needs vcpython27 to build and install netifaces on 64-bit Windows. diff --git a/newsfragments/3687.minor b/newsfragments/3687.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3691.minor b/newsfragments/3691.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3692.minor b/newsfragments/3692.minor new file mode 100644 index 000000000..e69de29bb diff --git a/src/allmydata/__init__.py b/src/allmydata/__init__.py index b29868c05..333394fc5 100644 --- a/src/allmydata/__init__.py +++ b/src/allmydata/__init__.py @@ -8,7 +8,7 @@ from __future__ import division from __future__ import print_function from __future__ import unicode_literals -from future.utils import PY2 +from future.utils import PY2, PY3 if PY2: # Don't import future str() so we don't break Foolscap serialization on Python 2. from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 @@ -62,3 +62,18 @@ standard_library.install_aliases() from ._monkeypatch import patch patch() del patch + + +# On Python 3, turn BytesWarnings into exceptions. This can have potential +# production impact... if BytesWarnings are actually present in the codebase. +# Given that this has been enabled before Python 3 Tahoe-LAFS was publicly +# released, no such code should exist, and this will ensure it doesn't get +# added either. +# +# Also note that BytesWarnings only happen if Python is run with -b option, so +# in practice this should only affect tests. +if PY3: + import warnings + # Error on BytesWarnings, to catch things like str(b""), but only for + # allmydata code. 
+ warnings.filterwarnings("error", category=BytesWarning, module=".*allmydata.*") diff --git a/src/allmydata/client.py b/src/allmydata/client.py index 3bf976fe5..a6c45643f 100644 --- a/src/allmydata/client.py +++ b/src/allmydata/client.py @@ -116,7 +116,6 @@ _client_config = configutil.ValidConfiguration( ), "sftpd": ( "accounts.file", - "accounts.url", "enabled", "host_privkey_file", "host_pubkey_file", @@ -1042,13 +1041,12 @@ class _Client(node.Node, pollmixin.PollMixin): accountfile = self.config.get_config("sftpd", "accounts.file", None) if accountfile: accountfile = self.config.get_config_path(accountfile) - accounturl = self.config.get_config("sftpd", "accounts.url", None) sftp_portstr = self.config.get_config("sftpd", "port", "tcp:8022") pubkey_file = self.config.get_config("sftpd", "host_pubkey_file") privkey_file = self.config.get_config("sftpd", "host_privkey_file") from allmydata.frontends import sftpd - s = sftpd.SFTPServer(self, accountfile, accounturl, + s = sftpd.SFTPServer(self, accountfile, sftp_portstr, pubkey_file, privkey_file) s.setServiceParent(self) diff --git a/src/allmydata/frontends/auth.py b/src/allmydata/frontends/auth.py index 7f81572fe..f2ac99b8f 100644 --- a/src/allmydata/frontends/auth.py +++ b/src/allmydata/frontends/auth.py @@ -1,14 +1,10 @@ -import os - from zope.interface import implementer -from twisted.web.client import getPage from twisted.internet import defer from twisted.cred import error, checkers, credentials from twisted.conch.ssh import keys from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB from allmydata.util.dictutil import BytesKeyDict -from allmydata.util import base32 from allmydata.util.fileutil import abspath_expanduser_unicode @@ -86,54 +82,3 @@ class AccountFileChecker(object): d = defer.maybeDeferred(creds.checkPassword, correct) d.addCallback(self._cbPasswordMatch, str(creds.username)) return d - - -@implementer(checkers.ICredentialsChecker) -class AccountURLChecker(object): - credentialInterfaces = (credentials.IUsernamePassword,) - - def __init__(self, client, auth_url): - self.client = client - self.auth_url = auth_url - - def _cbPasswordMatch(self, rootcap, username): - return FTPAvatarID(username, rootcap) - - def post_form(self, username, password): - sepbase = base32.b2a(os.urandom(4)) - sep = "--" + sepbase - form = [] - form.append(sep) - fields = {"action": "authenticate", - "email": username, - "passwd": password, - } - for name, value in fields.iteritems(): - form.append('Content-Disposition: form-data; name="%s"' % name) - form.append('') - assert isinstance(value, str) - form.append(value) - form.append(sep) - form[-1] += "--" - body = "\r\n".join(form) + "\r\n" - headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase, - } - return getPage(self.auth_url, method="POST", - postdata=body, headers=headers, - followRedirect=True, timeout=30) - - def _parse_response(self, res): - rootcap = res.strip() - if rootcap == "0": - raise error.UnauthorizedLogin - return rootcap - - def requestAvatarId(self, credentials): - # construct a POST to the login form. While this could theoretically - # be done with something like the stdlib 'email' package, I can't - # figure out how, so we just slam together a form manually. 
- d = self.post_form(credentials.username, credentials.password) - d.addCallback(self._parse_response) - d.addCallback(self._cbPasswordMatch, str(credentials.username)) - return d - diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index bc7196de6..17eca993e 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1983,7 +1983,7 @@ class ShellSession(PrefixingLogMixin): components.registerAdapter(ShellSession, SFTPUserHandler, ISession) -from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme +from allmydata.frontends.auth import AccountFileChecker, NeedRootcapLookupScheme @implementer(portal.IRealm) class Dispatcher(object): @@ -2000,7 +2000,7 @@ class Dispatcher(object): class SFTPServer(service.MultiService): name = "frontend:sftp" - def __init__(self, client, accountfile, accounturl, + def __init__(self, client, accountfile, sftp_portstr, pubkey_file, privkey_file): precondition(isinstance(accountfile, (str, type(None))), accountfile) precondition(isinstance(pubkey_file, str), pubkey_file) @@ -2013,12 +2013,9 @@ class SFTPServer(service.MultiService): if accountfile: c = AccountFileChecker(self, accountfile) p.registerChecker(c) - if accounturl: - c = AccountURLChecker(self, accounturl) - p.registerChecker(c) - if not accountfile and not accounturl: + if not accountfile: # we could leave this anonymous, with just the /uri/CAP form - raise NeedRootcapLookupScheme("must provide an account file or URL") + raise NeedRootcapLookupScheme("must provide an account file") pubkey = keys.Key.fromFile(pubkey_file.encode(get_filesystem_encoding())) privkey = keys.Key.fromFile(privkey_file.encode(get_filesystem_encoding())) diff --git a/src/allmydata/scripts/admin.py b/src/allmydata/scripts/admin.py index 50dde9e43..abe3d093c 100644 --- a/src/allmydata/scripts/admin.py +++ b/src/allmydata/scripts/admin.py @@ -1,5 +1,8 @@ from __future__ import print_function +from past.builtins import unicode +from six import ensure_binary + try: from allmydata.scripts.types_ import SubCommands except ImportError: @@ -22,8 +25,10 @@ def print_keypair(options): from allmydata.crypto import ed25519 out = options.stdout private_key, public_key = ed25519.create_signing_keypair() - print("private:", ed25519.string_from_signing_key(private_key), file=out) - print("public:", ed25519.string_from_verifying_key(public_key), file=out) + print("private:", unicode(ed25519.string_from_signing_key(private_key), "ascii"), + file=out) + print("public:", unicode(ed25519.string_from_verifying_key(public_key), "ascii"), + file=out) class DerivePubkeyOptions(BaseOptions): def parseArgs(self, privkey): @@ -45,9 +50,10 @@ def derive_pubkey(options): out = options.stdout from allmydata.crypto import ed25519 privkey_vs = options.privkey + privkey_vs = ensure_binary(privkey_vs) private_key, public_key = ed25519.signing_keypair_from_string(privkey_vs) - print("private:", ed25519.string_from_signing_key(private_key), file=out) - print("public:", ed25519.string_from_verifying_key(public_key), file=out) + print("private:", unicode(ed25519.string_from_signing_key(private_key), "ascii"), file=out) + print("public:", unicode(ed25519.string_from_verifying_key(public_key), "ascii"), file=out) return 0 class AdminCommand(BaseOptions): diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py index 811ae7ef9..011dc3b21 100644 --- a/src/allmydata/scripts/cli.py +++ b/src/allmydata/scripts/cli.py @@ -1,5 +1,7 @@ from __future__ import 
print_function +from past.builtins import unicode + import os.path, re, fnmatch try: @@ -36,7 +38,7 @@ class FileStoreOptions(BaseOptions): # compute a node-url from the existing options, put in self['node-url'] if self['node-url']: - if (not isinstance(self['node-url'], basestring) + if (not isinstance(self['node-url'], (bytes, unicode)) or not NODEURL_RE.match(self['node-url'])): msg = ("--node-url is required to be a string and look like " "\"http://HOSTNAMEORADDR:PORT\", not: %r" % @@ -224,7 +226,7 @@ class CpOptions(FileStoreOptions): def parseArgs(self, *args): if len(args) < 2: raise usage.UsageError("cp requires at least two arguments") - self.sources = map(argv_to_unicode, args[:-1]) + self.sources = [argv_to_unicode(arg) for arg in args[:-1]] self.destination = argv_to_unicode(args[-1]) synopsis = "[options] FROM.. TO" @@ -435,7 +437,7 @@ class CheckOptions(FileStoreOptions): ("add-lease", None, "Add/renew lease on all shares."), ] def parseArgs(self, *locations): - self.locations = map(argv_to_unicode, locations) + self.locations = list(map(argv_to_unicode, locations)) synopsis = "[options] [ALIAS:PATH]" description = """ @@ -452,7 +454,7 @@ class DeepCheckOptions(FileStoreOptions): ("verbose", "v", "Be noisy about what is happening."), ] def parseArgs(self, *locations): - self.locations = map(argv_to_unicode, locations) + self.locations = list(map(argv_to_unicode, locations)) synopsis = "[options] [ALIAS:PATH]" description = """ diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py index b8aeee91e..ba40519de 100644 --- a/src/allmydata/scripts/debug.py +++ b/src/allmydata/scripts/debug.py @@ -452,7 +452,7 @@ def dump_cap(options): from allmydata import uri from allmydata.util import base32 from base64 import b32decode - import urlparse, urllib + from urllib.parse import unquote, urlparse out = options.stdout cap = options.cap @@ -461,18 +461,18 @@ def dump_cap(options): nodeid = b32decode(options['nodeid'].upper()) secret = None if options['client-secret']: - secret = base32.a2b(options['client-secret']) + secret = base32.a2b(options['client-secret'].encode("ascii")) elif options['client-dir']: secretfile = os.path.join(options['client-dir'], "private", "secret") try: - secret = base32.a2b(open(secretfile, "r").read().strip()) + secret = base32.a2b(open(secretfile, "rb").read().strip()) except EnvironmentError: pass if cap.startswith("http"): - scheme, netloc, path, params, query, fragment = urlparse.urlparse(cap) + scheme, netloc, path, params, query, fragment = urlparse(cap) assert path.startswith("/uri/") - cap = urllib.unquote(path[len("/uri/"):]) + cap = unquote(path[len("/uri/"):]) u = uri.from_string(cap) @@ -485,19 +485,19 @@ def _dump_secrets(storage_index, secret, nodeid, out): if secret: crs = hashutil.my_renewal_secret_hash(secret) - print(" client renewal secret:", base32.b2a(crs), file=out) + print(" client renewal secret:", unicode(base32.b2a(crs), "ascii"), file=out) frs = hashutil.file_renewal_secret_hash(crs, storage_index) - print(" file renewal secret:", base32.b2a(frs), file=out) + print(" file renewal secret:", unicode(base32.b2a(frs), "ascii"), file=out) if nodeid: renew = hashutil.bucket_renewal_secret_hash(frs, nodeid) - print(" lease renewal secret:", base32.b2a(renew), file=out) + print(" lease renewal secret:", unicode(base32.b2a(renew), "ascii"), file=out) ccs = hashutil.my_cancel_secret_hash(secret) - print(" client cancel secret:", base32.b2a(ccs), file=out) + print(" client cancel secret:", unicode(base32.b2a(ccs), "ascii"), 
file=out) fcs = hashutil.file_cancel_secret_hash(ccs, storage_index) - print(" file cancel secret:", base32.b2a(fcs), file=out) + print(" file cancel secret:", unicode(base32.b2a(fcs), "ascii"), file=out) if nodeid: cancel = hashutil.bucket_cancel_secret_hash(fcs, nodeid) - print(" lease cancel secret:", base32.b2a(cancel), file=out) + print(" lease cancel secret:", unicode(base32.b2a(cancel), "ascii"), file=out) def dump_uri_instance(u, nodeid, secret, out, show_header=True): from allmydata import uri @@ -508,19 +508,19 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True): if isinstance(u, uri.CHKFileURI): if show_header: print("CHK File:", file=out) - print(" key:", base32.b2a(u.key), file=out) - print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out) + print(" key:", unicode(base32.b2a(u.key), "ascii"), file=out) + print(" UEB hash:", unicode(base32.b2a(u.uri_extension_hash), "ascii"), file=out) print(" size:", u.size, file=out) print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) _dump_secrets(u.get_storage_index(), secret, nodeid, out) elif isinstance(u, uri.CHKFileVerifierURI): if show_header: print("CHK Verifier URI:", file=out) - print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out) + print(" UEB hash:", unicode(base32.b2a(u.uri_extension_hash), "ascii"), file=out) print(" size:", u.size, file=out) print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) elif isinstance(u, uri.LiteralFileURI): if show_header: @@ -530,52 +530,52 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True): elif isinstance(u, uri.WriteableSSKFileURI): # SDMF if show_header: print("SDMF Writeable URI:", file=out) - print(" writekey:", base32.b2a(u.writekey), file=out) - print(" readkey:", base32.b2a(u.readkey), file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) - print(" fingerprint:", base32.b2a(u.fingerprint), file=out) + print(" writekey:", unicode(base32.b2a(u.writekey), "ascii"), file=out) + print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) + print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out) print(file=out) if nodeid: we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid) - print(" write_enabler:", base32.b2a(we), file=out) + print(" write_enabler:", unicode(base32.b2a(we), "ascii"), file=out) print(file=out) _dump_secrets(u.get_storage_index(), secret, nodeid, out) elif isinstance(u, uri.ReadonlySSKFileURI): if show_header: print("SDMF Read-only URI:", file=out) - print(" readkey:", base32.b2a(u.readkey), file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) - print(" fingerprint:", base32.b2a(u.fingerprint), file=out) + print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) + print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out) elif isinstance(u, uri.SSKVerifierURI): if show_header: print("SDMF Verifier URI:", file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) - print(" fingerprint:", 
base32.b2a(u.fingerprint), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) + print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out) elif isinstance(u, uri.WriteableMDMFFileURI): # MDMF if show_header: print("MDMF Writeable URI:", file=out) - print(" writekey:", base32.b2a(u.writekey), file=out) - print(" readkey:", base32.b2a(u.readkey), file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) - print(" fingerprint:", base32.b2a(u.fingerprint), file=out) + print(" writekey:", unicode(base32.b2a(u.writekey), "ascii"), file=out) + print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) + print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out) print(file=out) if nodeid: we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid) - print(" write_enabler:", base32.b2a(we), file=out) + print(" write_enabler:", unicode(base32.b2a(we), "ascii"), file=out) print(file=out) _dump_secrets(u.get_storage_index(), secret, nodeid, out) elif isinstance(u, uri.ReadonlyMDMFFileURI): if show_header: print("MDMF Read-only URI:", file=out) - print(" readkey:", base32.b2a(u.readkey), file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) - print(" fingerprint:", base32.b2a(u.fingerprint), file=out) + print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) + print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out) elif isinstance(u, uri.MDMFVerifierURI): if show_header: print("MDMF Verifier URI:", file=out) - print(" storage index:", si_b2a(u.get_storage_index()), file=out) - print(" fingerprint:", base32.b2a(u.fingerprint), file=out) + print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out) + print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out) elif isinstance(u, uri.ImmutableDirectoryURI): # CHK-based directory diff --git a/src/allmydata/scripts/slow_operation.py b/src/allmydata/scripts/slow_operation.py index ce25e9667..b4b2f8196 100644 --- a/src/allmydata/scripts/slow_operation.py +++ b/src/allmydata/scripts/slow_operation.py @@ -1,12 +1,16 @@ from __future__ import print_function +from future.utils import PY3 +from past.builtins import unicode +from six import ensure_str + import os, time from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ UnknownAliasError from allmydata.scripts.common_http import do_http, format_http_error from allmydata.util import base32 from allmydata.util.encodingutil import quote_output, is_printable_ascii -import urllib +from urllib.parse import quote as url_quote import json class SlowOperationRunner(object): @@ -14,7 +18,7 @@ class SlowOperationRunner(object): def run(self, options): stderr = options.stderr self.options = options - self.ophandle = ophandle = base32.b2a(os.urandom(16)) + self.ophandle = ophandle = ensure_str(base32.b2a(os.urandom(16))) nodeurl = options['node-url'] if not nodeurl.endswith("/"): nodeurl += "/" @@ -25,9 +29,10 @@ class SlowOperationRunner(object): except UnknownAliasError as e: e.display(stderr) return 1 + path = unicode(path, "utf-8") if path == '/': path = '' - url = nodeurl + "uri/%s" % urllib.quote(rootcap) + url = nodeurl + "uri/%s" % url_quote(rootcap) if path: url += "/" + escape_path(path) # todo: should it end with a 
slash? @@ -74,8 +79,13 @@ class SlowOperationRunner(object): if not data["finished"]: return False if self.options.get("raw"): + if PY3: + # need to write bytes! + stdout = stdout.buffer if is_printable_ascii(jdata): - print(jdata, file=stdout) + stdout.write(jdata) + stdout.write(b"\n") + stdout.flush() else: print("The JSON response contained unprintable characters:\n%s" % quote_output(jdata), file=stderr) return True diff --git a/src/allmydata/scripts/tahoe_check.py b/src/allmydata/scripts/tahoe_check.py index cef9e32be..82885d073 100644 --- a/src/allmydata/scripts/tahoe_check.py +++ b/src/allmydata/scripts/tahoe_check.py @@ -1,6 +1,6 @@ from __future__ import print_function -import urllib +from urllib.parse import quote as url_quote import json # Python 2 compatibility @@ -34,9 +34,10 @@ def check_location(options, where): except UnknownAliasError as e: e.display(stderr) return 1 + path = str(path, "utf-8") if path == '/': path = '' - url = nodeurl + "uri/%s" % urllib.quote(rootcap) + url = nodeurl + "uri/%s" % url_quote(rootcap) if path: url += "/" + escape_path(path) # todo: should it end with a slash? @@ -52,7 +53,8 @@ def check_location(options, where): if resp.status != 200: print(format_http_error("ERROR", resp), file=stderr) return 1 - jdata = resp.read() + jdata = resp.read().decode() + if options.get("raw"): stdout.write(jdata) stdout.write("\n") @@ -139,7 +141,7 @@ class DeepCheckOutput(LineOnlyReceiver, object): if self.in_error: print(quote_output(line, quotemarks=False), file=self.stderr) return - if line.startswith("ERROR:"): + if line.startswith(b"ERROR:"): self.in_error = True self.streamer.rc = 1 print(quote_output(line, quotemarks=False), file=self.stderr) @@ -202,7 +204,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object): if self.in_error: print(quote_output(line, quotemarks=False), file=self.stderr) return - if line.startswith("ERROR:"): + if line.startswith(b"ERROR:"): self.in_error = True self.streamer.rc = 1 print(quote_output(line, quotemarks=False), file=self.stderr) @@ -295,9 +297,10 @@ class DeepCheckStreamer(LineOnlyReceiver, object): except UnknownAliasError as e: e.display(stderr) return 1 + path = str(path, "utf-8") if path == '/': path = '' - url = nodeurl + "uri/%s" % urllib.quote(rootcap) + url = nodeurl + "uri/%s" % url_quote(rootcap) if path: url += "/" + escape_path(path) # todo: should it end with a slash? 
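The CLI hunks in this patch keep converging on the same URL-building pattern: decode the alias path from UTF-8 bytes, then percent-encode with urllib.parse.quote. A minimal, self-contained sketch of that pattern follows; escape_path here is a simplified stand-in for the real helper in allmydata.scripts.common, included only so the example runs on its own.

from urllib.parse import quote as url_quote


def escape_path(path):
    # Simplified stand-in: quote each segment, keep "/" separators.
    return "/".join(url_quote(segment) for segment in path.split("/"))


def build_node_url(nodeurl, rootcap, path):
    """Build a gateway URL of the form NODEURL/uri/ROOTCAP/ESCAPED/PATH."""
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if isinstance(path, bytes):
        # get_alias() still hands back UTF-8 bytes, hence the decode step
        # added throughout these hunks.
        path = str(path, "utf-8")
    if path == "/":
        path = ""
    url = nodeurl + "uri/%s" % url_quote(rootcap)
    if path:
        url += "/" + escape_path(path)
    return url


# Example: build_node_url("http://127.0.0.1:3456", "URI:DIR2:abc:def", b"sub/dir")
# -> "http://127.0.0.1:3456/uri/URI%3ADIR2%3Aabc%3Adef/sub/dir"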
@@ -322,7 +325,7 @@ class DeepCheckStreamer(LineOnlyReceiver, object): if not chunk: break if self.options["raw"]: - stdout.write(chunk) + stdout.write(chunk.decode()) else: output.dataReceived(chunk) if not self.options["raw"]: diff --git a/src/allmydata/scripts/tahoe_cp.py b/src/allmydata/scripts/tahoe_cp.py index f7879f35c..fb86ff0ec 100644 --- a/src/allmydata/scripts/tahoe_cp.py +++ b/src/allmydata/scripts/tahoe_cp.py @@ -1,10 +1,12 @@ from __future__ import print_function +from past.builtins import unicode + import os.path -import urllib -import json +from urllib.parse import quote as url_quote from collections import defaultdict -from six.moves import cStringIO as StringIO +from io import BytesIO + from twisted.python.failure import Failure from allmydata.scripts.common import get_alias, escape_path, \ DefaultAliasMarker, TahoeError @@ -15,6 +17,7 @@ from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abs from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, \ quote_local_unicode_path, to_bytes from allmydata.util.assertutil import precondition, _assert +from allmydata.util import jsonbytes as json class MissingSourceError(TahoeError): @@ -61,8 +64,8 @@ def mkdir(targeturl): def make_tahoe_subdirectory(nodeurl, parent_writecap, name): url = nodeurl + "/".join(["uri", - urllib.quote(parent_writecap), - urllib.quote(unicode_to_url(name)), + url_quote(parent_writecap), + url_quote(unicode_to_url(name)), ]) + "?t=mkdir" resp = do_http("POST", url) if resp.status in (200, 201): @@ -198,13 +201,21 @@ class TahoeFileSource(object): def open(self, caps_only): if caps_only: - return StringIO(self.readcap) - url = self.nodeurl + "uri/" + urllib.quote(self.readcap) + return BytesIO(self.readcap) + url = self.nodeurl + "uri/" + url_quote(self.readcap) return GET_to_file(url) def bestcap(self): return self.writecap or self.readcap + +def seekable(file_like): + """Return whether the file-like object is seekable.""" + return hasattr(file_like, "seek") and ( + not hasattr(file_like, "seekable") or file_like.seekable() + ) + + class TahoeFileTarget(object): def __init__(self, nodeurl, mutable, writecap, readcap, url): self.nodeurl = nodeurl @@ -218,7 +229,7 @@ class TahoeFileTarget(object): assert self.url # our do_http() call currently requires a string or a filehandle with # a real .seek - if not hasattr(inf, "seek"): + if not seekable(inf): inf = inf.read() PUT(self.url, inf) # TODO: this always creates immutable files. 
We might want an option @@ -239,7 +250,7 @@ class TahoeDirectorySource(object): self.writecap = writecap self.readcap = readcap bestcap = writecap or readcap - url = self.nodeurl + "uri/%s" % urllib.quote(bestcap) + url = self.nodeurl + "uri/%s" % url_quote(bestcap) resp = do_http("GET", url + "?t=json") if resp.status != 200: raise HTTPError("Error examining source directory", resp) @@ -249,7 +260,7 @@ class TahoeDirectorySource(object): self.mutable = d.get("mutable", False) # older nodes don't provide it self.children_d = dict( [(unicode(name),value) for (name,value) - in d["children"].iteritems()] ) + in d["children"].items()] ) self.children = None def init_from_parsed(self, parsed): @@ -259,7 +270,7 @@ class TahoeDirectorySource(object): self.mutable = d.get("mutable", False) # older nodes don't provide it self.children_d = dict( [(unicode(name),value) for (name,value) - in d["children"].iteritems()] ) + in d["children"].items()] ) self.children = None def populate(self, recurse): @@ -304,7 +315,7 @@ class TahoeMissingTarget(object): def put_file(self, inf): # We want to replace this object in-place. - if not hasattr(inf, "seek"): + if not seekable(inf): inf = inf.read() PUT(self.url, inf) # TODO: this always creates immutable files. We might want an option @@ -329,14 +340,14 @@ class TahoeDirectoryTarget(object): self.mutable = d.get("mutable", False) # older nodes don't provide it self.children_d = dict( [(unicode(name),value) for (name,value) - in d["children"].iteritems()] ) + in d["children"].items()] ) self.children = None def init_from_grid(self, writecap, readcap): self.writecap = writecap self.readcap = readcap bestcap = writecap or readcap - url = self.nodeurl + "uri/%s" % urllib.quote(bestcap) + url = self.nodeurl + "uri/%s" % url_quote(bestcap) resp = do_http("GET", url + "?t=json") if resp.status != 200: raise HTTPError("Error examining target directory", resp) @@ -346,7 +357,7 @@ class TahoeDirectoryTarget(object): self.mutable = d.get("mutable", False) # older nodes don't provide it self.children_d = dict( [(unicode(name),value) for (name,value) - in d["children"].iteritems()] ) + in d["children"].items()] ) self.children = None def just_created(self, writecap): @@ -370,8 +381,8 @@ class TahoeDirectoryTarget(object): url = None if self.writecap: url = self.nodeurl + "/".join(["uri", - urllib.quote(self.writecap), - urllib.quote(unicode_to_url(name))]) + url_quote(self.writecap), + url_quote(unicode_to_url(name))]) self.children[name] = TahoeFileTarget(self.nodeurl, mutable, writecap, readcap, url) elif data[0] == "dirnode": @@ -415,7 +426,7 @@ class TahoeDirectoryTarget(object): def put_file(self, name, inf): precondition(isinstance(name, unicode), name) url = self.nodeurl + "uri" - if not hasattr(inf, "seek"): + if not seekable(inf): inf = inf.read() if self.children is None: @@ -439,7 +450,7 @@ class TahoeDirectoryTarget(object): def set_children(self): if not self.new_children: return - url = (self.nodeurl + "uri/" + urllib.quote(self.writecap) + url = (self.nodeurl + "uri/" + url_quote(self.writecap) + "?t=set_children") set_data = {} for (name, filecap) in self.new_children.items(): @@ -450,7 +461,7 @@ class TahoeDirectoryTarget(object): # TODO: think about how this affects forward-compatibility for # unknown caps set_data[name] = ["filenode", {"rw_uri": filecap}] - body = json.dumps(set_data) + body = json.dumps_bytes(set_data) POST(url, body) FileSources = (LocalFileSource, TahoeFileSource) @@ -603,7 +614,7 @@ class Copier(object): t = LocalFileTarget(pathname) 
# non-empty else: # this is a tahoe object - url = self.nodeurl + "uri/%s" % urllib.quote(rootcap) + url = self.nodeurl + "uri/%s" % url_quote(rootcap) if path: url += "/" + escape_path(path) @@ -656,7 +667,7 @@ class Copier(object): t = LocalFileSource(pathname, name) # non-empty else: # this is a tahoe object - url = self.nodeurl + "uri/%s" % urllib.quote(rootcap) + url = self.nodeurl + "uri/%s" % url_quote(rootcap) name = None if path: if path.endswith("/"): diff --git a/src/allmydata/scripts/tahoe_ls.py b/src/allmydata/scripts/tahoe_ls.py index 91665e77b..b3e09b699 100644 --- a/src/allmydata/scripts/tahoe_ls.py +++ b/src/allmydata/scripts/tahoe_ls.py @@ -27,6 +27,8 @@ def list(options): except UnknownAliasError as e: e.display(stderr) return 1 + + path = unicode(path, "utf-8") url = nodeurl + "uri/%s" % url_quote(rootcap) if path: # move where.endswith check here? @@ -45,10 +47,10 @@ def list(options): return resp.status data = resp.read() - if options['json']: # The webapi server should always output printable ASCII. if is_printable_ascii(data): + data = unicode(data, "ascii") print(data, file=stdout) return 0 else: @@ -70,7 +72,7 @@ def list(options): children = d['children'] else: # paths returned from get_alias are always valid UTF-8 - childname = path.split("/")[-1].decode('utf-8') + childname = path.split("/")[-1] children = {childname: (nodetype, d)} if "metadata" not in d: d["metadata"] = {} diff --git a/src/allmydata/scripts/tahoe_manifest.py b/src/allmydata/scripts/tahoe_manifest.py index 386cdd1ad..966583244 100644 --- a/src/allmydata/scripts/tahoe_manifest.py +++ b/src/allmydata/scripts/tahoe_manifest.py @@ -1,6 +1,10 @@ from __future__ import print_function -import urllib, json +from future.utils import PY3 +from past.builtins import unicode + +from urllib.parse import quote as url_quote +import json from twisted.protocols.basic import LineOnlyReceiver from allmydata.util.abbreviate import abbreviate_space_both from allmydata.scripts.slow_operation import SlowOperationRunner @@ -33,9 +37,10 @@ class ManifestStreamer(LineOnlyReceiver, object): except UnknownAliasError as e: e.display(stderr) return 1 + path = unicode(path, "utf-8") if path == '/': path = '' - url = nodeurl + "uri/%s" % urllib.quote(rootcap) + url = nodeurl + "uri/%s" % url_quote(rootcap) if path: url += "/" + escape_path(path) # todo: should it end with a slash? @@ -47,6 +52,9 @@ class ManifestStreamer(LineOnlyReceiver, object): #print("RESP", dir(resp)) # use Twisted to split this into lines self.in_error = False + # Writing bytes, so need binary stdout. 
+ if PY3: + stdout = stdout.buffer while True: chunk = resp.read(100) if not chunk: @@ -63,7 +71,7 @@ class ManifestStreamer(LineOnlyReceiver, object): if self.in_error: print(quote_output(line, quotemarks=False), file=stderr) return - if line.startswith("ERROR:"): + if line.startswith(b"ERROR:"): self.in_error = True self.rc = 1 print(quote_output(line, quotemarks=False), file=stderr) diff --git a/src/allmydata/scripts/tahoe_mkdir.py b/src/allmydata/scripts/tahoe_mkdir.py index a76adc8fc..54e8ebe46 100644 --- a/src/allmydata/scripts/tahoe_mkdir.py +++ b/src/allmydata/scripts/tahoe_mkdir.py @@ -1,6 +1,8 @@ from __future__ import print_function -import urllib +from past.builtins import unicode + +from urllib.parse import quote as url_quote from allmydata.scripts.common_http import do_http, check_http_error from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, UnknownAliasError from allmydata.util.encodingutil import quote_output @@ -24,7 +26,7 @@ def mkdir(options): # create a new unlinked directory url = nodeurl + "uri?t=mkdir" if options["format"]: - url += "&format=%s" % urllib.quote(options['format']) + url += "&format=%s" % url_quote(options['format']) resp = do_http("POST", url) rc = check_http_error(resp, stderr) if rc: @@ -35,13 +37,14 @@ def mkdir(options): return 0 # create a new directory at the given location + path = unicode(path, "utf-8") if path.endswith("/"): path = path[:-1] # path must be "/".join([s.encode("utf-8") for s in segments]) - url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap), - urllib.quote(path)) + url = nodeurl + "uri/%s/%s?t=mkdir" % (url_quote(rootcap), + url_quote(path)) if options['format']: - url += "&format=%s" % urllib.quote(options['format']) + url += "&format=%s" % url_quote(options['format']) resp = do_http("POST", url) check_http_error(resp, stderr) diff --git a/src/allmydata/scripts/tahoe_mv.py b/src/allmydata/scripts/tahoe_mv.py index 7d13ea72a..84f83edcd 100644 --- a/src/allmydata/scripts/tahoe_mv.py +++ b/src/allmydata/scripts/tahoe_mv.py @@ -1,7 +1,9 @@ from __future__ import print_function +from past.builtins import unicode + import re -import urllib +from urllib.parse import quote as url_quote import json from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ UnknownAliasError @@ -25,7 +27,8 @@ def mv(options, mode="move"): except UnknownAliasError as e: e.display(stderr) return 1 - from_url = nodeurl + "uri/%s" % urllib.quote(rootcap) + from_path = unicode(from_path, "utf-8") + from_url = nodeurl + "uri/%s" % url_quote(rootcap) if from_path: from_url += "/" + escape_path(from_path) # figure out the source cap @@ -43,7 +46,8 @@ def mv(options, mode="move"): except UnknownAliasError as e: e.display(stderr) return 1 - to_url = nodeurl + "uri/%s" % urllib.quote(rootcap) + to_url = nodeurl + "uri/%s" % url_quote(rootcap) + path = unicode(path, "utf-8") if path: to_url += "/" + escape_path(path) diff --git a/src/allmydata/scripts/tahoe_put.py b/src/allmydata/scripts/tahoe_put.py index 8d87408dc..8db705d01 100644 --- a/src/allmydata/scripts/tahoe_put.py +++ b/src/allmydata/scripts/tahoe_put.py @@ -1,7 +1,10 @@ from __future__ import print_function -from six.moves import cStringIO as StringIO -import urllib +from future.utils import PY2 +from past.builtins import unicode + +from io import BytesIO +from urllib.parse import quote as url_quote from allmydata.scripts.common_http import do_http, format_http_success, format_http_error from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ 
@@ -46,19 +49,20 @@ def put(options): # FIXME: don't hardcode cap format. if to_file.startswith("URI:MDMF:") or to_file.startswith("URI:SSK:"): - url = nodeurl + "uri/%s" % urllib.quote(to_file) + url = nodeurl + "uri/%s" % url_quote(to_file) else: try: rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS) except UnknownAliasError as e: e.display(stderr) return 1 + path = unicode(path, "utf-8") if path.startswith("/"): suggestion = to_file.replace(u"/", u"", 1) print("Error: The remote filename must not start with a slash", file=stderr) print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr) return 1 - url = nodeurl + "uri/%s/" % urllib.quote(rootcap) + url = nodeurl + "uri/%s/" % url_quote(rootcap) if path: url += escape_path(path) else: @@ -80,8 +84,13 @@ def put(options): # Content-Length field. So we currently must copy it. if verbosity > 0: print("waiting for file data on stdin..", file=stderr) - data = stdin.read() - infileobj = StringIO(data) + # We're uploading arbitrary files, so this had better be bytes: + if PY2: + stdinb = stdin + else: + stdinb = stdin.buffer + data = stdinb.read() + infileobj = BytesIO(data) resp = do_http("PUT", url, infileobj) diff --git a/src/allmydata/scripts/tahoe_unlink.py b/src/allmydata/scripts/tahoe_unlink.py index bc1d43c9e..1ec92c69e 100644 --- a/src/allmydata/scripts/tahoe_unlink.py +++ b/src/allmydata/scripts/tahoe_unlink.py @@ -1,6 +1,6 @@ from __future__ import print_function -import urllib +from urllib.parse import quote as url_quote from allmydata.scripts.common_http import do_http, format_http_success, format_http_error from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ UnknownAliasError @@ -27,7 +27,7 @@ def unlink(options, command="unlink"): 'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,), file=stderr) return 1 - url = nodeurl + "uri/%s" % urllib.quote(rootcap) + url = nodeurl + "uri/%s" % url_quote(rootcap) url += "/" + escape_path(path) resp = do_http("DELETE", url) diff --git a/src/allmydata/scripts/tahoe_webopen.py b/src/allmydata/scripts/tahoe_webopen.py index a7b7ca7e1..0292e0d40 100644 --- a/src/allmydata/scripts/tahoe_webopen.py +++ b/src/allmydata/scripts/tahoe_webopen.py @@ -1,7 +1,10 @@ +from past.builtins import unicode + +from urllib.parse import quote as url_quote from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ UnknownAliasError -import urllib + def webopen(options, opener=None): nodeurl = options['node-url'] @@ -15,9 +18,10 @@ def webopen(options, opener=None): except UnknownAliasError as e: e.display(stderr) return 1 + path = unicode(path, "utf-8") if path == '/': path = '' - url = nodeurl + "uri/%s" % urllib.quote(rootcap) + url = nodeurl + "uri/%s" % url_quote(rootcap) if path: url += "/" + escape_path(path) else: diff --git a/src/allmydata/test/__init__.py b/src/allmydata/test/__init__.py index c75f8d003..893aa15ce 100644 --- a/src/allmydata/test/__init__.py +++ b/src/allmydata/test/__init__.py @@ -20,11 +20,10 @@ from __future__ import division from __future__ import print_function from __future__ import unicode_literals -from future.utils import PY2, PY3 +from future.utils import PY2 if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -import warnings from traceback import extract_stack, format_list from foolscap.pb import Listener @@ -33,11 +32,6 @@ from 
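The tahoe_put and tahoe_manifest hunks above switch the CLI to byte-oriented standard streams on Python 3. A short sketch of that pattern, independent of the Tahoe helpers (the function names are illustrative only, and it uses a getattr fallback rather than the future.utils PY2/PY3 checks the patch itself uses):

import sys
from io import BytesIO


def read_stdin_bytes():
    # sys.stdin is a text stream on Python 3; its .buffer attribute is the
    # underlying binary stream. Python 2's sys.stdin is already bytes.
    stdin = sys.stdin
    stdinb = getattr(stdin, "buffer", stdin)
    return BytesIO(stdinb.read())


def write_stdout_bytes(data):
    # Mirror image for output: write raw bytes via stdout.buffer when it
    # exists, falling back to the stream itself on Python 2.
    stdout = sys.stdout
    stdoutb = getattr(stdout, "buffer", stdout)
    stdoutb.write(data)
    stdoutb.flush()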
twisted.application import service from foolscap.logging.incident import IncidentQualifier -if PY3: - # Error on BytesWarnings, to catch things like str(b""), but only for - # allmydata code. - warnings.filterwarnings("error", category=BytesWarning, module="allmydata.*") - class NonQualifier(IncidentQualifier, object): def check_event(self, ev): @@ -131,5 +125,5 @@ if sys.platform == "win32": initialize() from eliot import to_file -from allmydata.util.jsonbytes import BytesJSONEncoder -to_file(open("eliot.log", "wb"), encoder=BytesJSONEncoder) +from allmydata.util.jsonbytes import AnyBytesJSONEncoder +to_file(open("eliot.log", "wb"), encoder=AnyBytesJSONEncoder) diff --git a/src/allmydata/test/cli/common.py b/src/allmydata/test/cli/common.py index 8796f815f..ed066c6b6 100644 --- a/src/allmydata/test/cli/common.py +++ b/src/allmydata/test/cli/common.py @@ -1,9 +1,22 @@ -from six import ensure_str +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from six import ensure_str, ensure_text from ...scripts import runner from ..common_util import ReallyEqualMixin, run_cli, run_cli_unicode def parse_options(basedir, command, args): + args = [ensure_text(s) for s in args] o = runner.Options() o.parseOptions(["--node-directory", basedir, command] + args) while hasattr(o, "subOptions"): diff --git a/src/allmydata/test/cli/test_check.py b/src/allmydata/test/cli/test_check.py index 8cf963da6..e01dcc4cb 100644 --- a/src/allmydata/test/cli/test_check.py +++ b/src/allmydata/test/cli/test_check.py @@ -1,3 +1,13 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_text + import os.path import json from twisted.trial import unittest @@ -5,20 +15,21 @@ from six.moves import cStringIO as StringIO from allmydata import uri from allmydata.util import base32 -from allmydata.util.encodingutil import quote_output, to_bytes +from allmydata.util.encodingutil import to_bytes from allmydata.mutable.publish import MutableData from allmydata.immutable import upload from allmydata.scripts import debug from ..no_network import GridTestMixin from .common import CLITestMixin + class Check(GridTestMixin, CLITestMixin, unittest.TestCase): def test_check(self): self.basedir = "cli/Check/check" self.set_up_grid() c0 = self.g.clients[0] - DATA = "data" * 100 + DATA = b"data" * 100 DATA_uploadable = MutableData(DATA) d = c0.create_mutable_file(DATA_uploadable) def _stash_uri(n): @@ -28,7 +39,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("check", self.uri)) def _check1(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() self.failUnless("Summary: Healthy" in lines, out) @@ -38,14 +49,14 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("check", 
"--raw", self.uri)) def _check2(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) data = json.loads(out) - self.failUnlessReallyEqual(to_bytes(data["summary"]), "Healthy") + self.failUnlessReallyEqual(to_bytes(data["summary"]), b"Healthy") self.failUnlessReallyEqual(data["results"]["healthy"], True) d.addCallback(_check2) - d.addCallback(lambda ign: c0.upload(upload.Data("literal", convergence=""))) + d.addCallback(lambda ign: c0.upload(upload.Data(b"literal", convergence=b""))) def _stash_lit_uri(n): self.lit_uri = n.get_uri() d.addCallback(_stash_lit_uri) @@ -53,7 +64,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("check", self.lit_uri)) def _check_lit(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() self.failUnless("Summary: Healthy (LIT)" in lines, out) @@ -62,13 +73,13 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("check", "--raw", self.lit_uri)) def _check_lit_raw(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) data = json.loads(out) self.failUnlessReallyEqual(data["results"]["healthy"], True) d.addCallback(_check_lit_raw) - d.addCallback(lambda ign: c0.create_immutable_dirnode({}, convergence="")) + d.addCallback(lambda ign: c0.create_immutable_dirnode({}, convergence=b"")) def _stash_lit_dir_uri(n): self.lit_dir_uri = n.get_uri() d.addCallback(_stash_lit_dir_uri) @@ -89,16 +100,16 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): cso.parseOptions([shares[1][2]]) storage_index = uri.from_string(self.uri).get_storage_index() self._corrupt_share_line = " server %s, SI %s, shnum %d" % \ - (base32.b2a(shares[1][1]), - base32.b2a(storage_index), - shares[1][0]) + (str(base32.b2a(shares[1][1]), "ascii"), + str(base32.b2a(storage_index), "ascii"), + shares[1][0]) debug.corrupt_share(cso) d.addCallback(_clobber_shares) d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri)) def _check3(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() summary = [l for l in lines if l.startswith("Summary")][0] @@ -112,7 +123,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("check", "--verify", "--raw", self.uri)) def _check3_raw(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) data = json.loads(out) self.failUnlessReallyEqual(data["results"]["healthy"], False) @@ -126,7 +137,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.do_cli("check", "--verify", "--repair", self.uri)) def _check4(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() self.failUnless("Summary: not healthy" in lines, out) @@ -140,7 +151,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.do_cli("check", "--verify", "--repair", self.uri)) def _check5(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() 
self.failUnless("Summary: healthy" in lines, out) @@ -156,14 +167,14 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): c0 = self.g.clients[0] self.uris = {} self.fileurls = {} - DATA = "data" * 100 - quoted_good = quote_output(u"g\u00F6\u00F6d") + DATA = b"data" * 100 + quoted_good = u"'g\u00F6\u00F6d'" d = c0.create_dirnode() def _stash_root_and_create_file(n): self.rootnode = n self.rooturi = n.get_uri() - return n.add_file(u"g\u00F6\u00F6d", upload.Data(DATA, convergence="")) + return n.add_file(u"g\u00F6\u00F6d", upload.Data(DATA, convergence=b"")) d.addCallback(_stash_root_and_create_file) def _stash_uri(fn, which): self.uris[which] = fn.get_uri() @@ -171,18 +182,18 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(_stash_uri, u"g\u00F6\u00F6d") d.addCallback(lambda ign: self.rootnode.add_file(u"small", - upload.Data("literal", - convergence=""))) + upload.Data(b"literal", + convergence=b""))) d.addCallback(_stash_uri, "small") d.addCallback(lambda ign: - c0.create_mutable_file(MutableData(DATA+"1"))) + c0.create_mutable_file(MutableData(DATA+b"1"))) d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn)) d.addCallback(_stash_uri, "mutable") d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi)) def _check1(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy" @@ -198,8 +209,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.rooturi)) def _check2(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) + out = ensure_text(out) lines = out.splitlines() self.failUnless("'': Healthy" in lines, out) self.failUnless("'small': Healthy (LIT)" in lines, out) @@ -212,7 +224,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("stats", self.rooturi)) def _check_stats(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() self.failUnlessIn(" count-immutable-files: 1", lines) @@ -236,8 +248,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): cso.parseOptions([shares[1][2]]) storage_index = uri.from_string(self.uris["mutable"]).get_storage_index() self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \ - (base32.b2a(shares[1][1]), - base32.b2a(storage_index), + (str(base32.b2a(shares[1][1]), "ascii"), + str(base32.b2a(storage_index), "ascii"), shares[1][0]) debug.corrupt_share(cso) d.addCallback(_clobber_shares) @@ -251,8 +263,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.do_cli("deep-check", "--verbose", self.rooturi)) def _check3(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) + out = ensure_text(out) lines = out.splitlines() self.failUnless("'': Healthy" in lines, out) self.failUnless("'small': Healthy (LIT)" in lines, out) @@ -268,8 +281,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.rooturi)) def _check4(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) + out = ensure_text(out) lines = out.splitlines() self.failUnless("'': Healthy" in lines, out) 
self.failUnless("'small': Healthy (LIT)" in lines, out) @@ -287,7 +301,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.rooturi)) def _check5(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() units = [json.loads(line) for line in lines] @@ -301,8 +315,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.rooturi)) def _check6(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(rc, 0) + out = ensure_text(out) lines = out.splitlines() self.failUnless("'': healthy" in lines, out) self.failUnless("'small': healthy" in lines, out) @@ -322,10 +337,10 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"subdir")) d.addCallback(_stash_uri, "subdir") d.addCallback(lambda fn: - fn.add_file(u"subfile", upload.Data(DATA+"2", ""))) + fn.add_file(u"subfile", upload.Data(DATA+b"2", b""))) d.addCallback(lambda ign: self.delete_shares_numbered(self.uris["subdir"], - range(10))) + list(range(10)))) # root # rootg\u00F6\u00F6d/ @@ -340,7 +355,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.failIfEqual(rc, 0) self.failUnlessIn("ERROR: UnrecoverableFileError", err) # the fatal directory should still show up, as the last line - self.failUnlessIn(" subdir\n", out) + self.failUnlessIn(" subdir\n", ensure_text(out)) d.addCallback(_manifest_failed) d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi)) @@ -379,7 +394,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check) d.addCallback(lambda ign: self.do_cli("deep-check")) d.addCallback(_check) @@ -396,7 +411,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) self.failUnlessIn("nonexistent", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check) return d @@ -416,10 +431,10 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): def _check(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) #Ensure healthy appears for each uri - self.failUnlessIn("Healthy", out[:len(out)/2]) - self.failUnlessIn("Healthy", out[len(out)/2:]) + self.failUnlessIn("Healthy", out[:len(out)//2]) + self.failUnlessIn("Healthy", out[len(out)//2:]) d.addCallback(_check) d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], "nonexistent:")) diff --git a/src/allmydata/test/cli/test_cli.py b/src/allmydata/test/cli/test_cli.py index 2b1bc1c86..8a9b4dfd6 100644 --- a/src/allmydata/test/cli/test_cli.py +++ b/src/allmydata/test/cli/test_cli.py @@ -1,8 +1,23 @@ -import os.path +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from six.moves import cStringIO as StringIO -import urllib, sys +from six import ensure_text, ensure_str + +import os.path +import sys import re from mock import patch, Mock +from urllib.parse import quote as url_quote from twisted.trial import unittest from twisted.python.monkey import MonkeyPatcher @@ -44,6 +59,7 @@ from allmydata.util.encodingutil import listdir_unicode, get_io_encoding class CLI(CLITestMixin, unittest.TestCase): def _dump_cap(self, *args): + args = [ensure_text(s) for s in args] config = debug.DumpCapOptions() config.stdout,config.stderr = StringIO(), StringIO() config.parseOptions(args) @@ -53,8 +69,8 @@ class CLI(CLITestMixin, unittest.TestCase): return output def test_dump_cap_chk(self): - key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" - uri_extension_hash = hashutil.uri_extension_hash("stuff") + key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + uri_extension_hash = hashutil.uri_extension_hash(b"stuff") needed_shares = 25 total_shares = 100 size = 1234 @@ -75,14 +91,14 @@ class CLI(CLITestMixin, unittest.TestCase): u.to_string()) self.failUnless("client renewal secret: znxmki5zdibb5qlt46xbdvk2t55j7hibejq3i5ijyurkr6m6jkhq" in output, output) - output = self._dump_cap(u.get_verify_cap().to_string()) + output = self._dump_cap(str(u.get_verify_cap().to_string(), "ascii")) self.failIf("key: " in output, output) self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output) self.failUnless("size: 1234" in output, output) self.failUnless("k/N: 25/100" in output, output) self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output) - prefixed_u = "http://127.0.0.1/uri/%s" % urllib.quote(u.to_string()) + prefixed_u = "http://127.0.0.1/uri/%s" % url_quote(u.to_string()) output = self._dump_cap(prefixed_u) self.failUnless("CHK File:" in output, output) self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output) @@ -92,14 +108,14 @@ class CLI(CLITestMixin, unittest.TestCase): self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output) def test_dump_cap_lit(self): - u = uri.LiteralFileURI("this is some data") + u = uri.LiteralFileURI(b"this is some data") output = self._dump_cap(u.to_string()) self.failUnless("Literal File URI:" in output, output) self.failUnless("data: 'this is some data'" in output, output) def test_dump_cap_sdmf(self): - writekey = "\x01" * 16 - fingerprint = "\xfe" * 32 + writekey = b"\x01" * 16 + fingerprint = b"\xfe" * 32 u = uri.WriteableSSKFileURI(writekey, fingerprint) output = self._dump_cap(u.to_string()) @@ -149,8 +165,8 @@ class CLI(CLITestMixin, unittest.TestCase): self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output) def test_dump_cap_mdmf(self): - writekey = "\x01" * 16 - fingerprint = "\xfe" * 32 + writekey = b"\x01" * 16 + fingerprint = b"\xfe" * 32 u = uri.WriteableMDMFFileURI(writekey, fingerprint) output = self._dump_cap(u.to_string()) @@ -201,8 +217,8 @@ class CLI(CLITestMixin, unittest.TestCase): def test_dump_cap_chk_directory(self): - key = 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" - uri_extension_hash = hashutil.uri_extension_hash("stuff") + key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + uri_extension_hash = hashutil.uri_extension_hash(b"stuff") needed_shares = 25 total_shares = 100 size = 1234 @@ -235,8 +251,8 @@ class CLI(CLITestMixin, unittest.TestCase): self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output) def test_dump_cap_sdmf_directory(self): - writekey = "\x01" * 16 - fingerprint = "\xfe" * 32 + writekey = b"\x01" * 16 + fingerprint = b"\xfe" * 32 u1 = uri.WriteableSSKFileURI(writekey, fingerprint) u = uri.DirectoryURI(u1) @@ -279,8 +295,8 @@ class CLI(CLITestMixin, unittest.TestCase): self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output) def test_dump_cap_mdmf_directory(self): - writekey = "\x01" * 16 - fingerprint = "\xfe" * 32 + writekey = b"\x01" * 16 + fingerprint = b"\xfe" * 32 u1 = uri.WriteableMDMFFileURI(writekey, fingerprint) u = uri.MDMFDirectoryURI(u1) @@ -340,7 +356,7 @@ class CLI(CLITestMixin, unittest.TestCase): fileutil.write("cli/test_catalog_shares/node1/storage/shares/mq/not-a-dir", "") # write a bogus share that looks a little bit like CHK fileutil.write(os.path.join(sharedir, "8"), - "\x00\x00\x00\x01" + "\xff" * 200) # this triggers an assert + b"\x00\x00\x00\x01" + b"\xff" * 200) # this triggers an assert nodedir2 = "cli/test_catalog_shares/node2" fileutil.make_dirs(nodedir2) @@ -348,7 +364,7 @@ class CLI(CLITestMixin, unittest.TestCase): # now make sure that the 'catalog-shares' commands survives the error out, err = self._catalog_shares(nodedir1, nodedir2) - self.failUnlessReallyEqual(out, "", out) + self.assertEqual(out, "") self.failUnless("Error processing " in err, "didn't see 'error processing' in '%s'" % err) #self.failUnless(nodedir1 in err, @@ -361,71 +377,71 @@ class CLI(CLITestMixin, unittest.TestCase): "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err) def test_alias(self): - def s128(c): return base32.b2a(c*(128/8)) - def s256(c): return base32.b2a(c*(256/8)) - TA = "URI:DIR2:%s:%s" % (s128("T"), s256("T")) - WA = "URI:DIR2:%s:%s" % (s128("W"), s256("W")) - CA = "URI:DIR2:%s:%s" % (s128("C"), s256("C")) + def s128(c): return base32.b2a(c*(128//8)) + def s256(c): return base32.b2a(c*(256//8)) + TA = b"URI:DIR2:%s:%s" % (s128(b"T"), s256(b"T")) + WA = b"URI:DIR2:%s:%s" % (s128(b"W"), s256(b"W")) + CA = b"URI:DIR2:%s:%s" % (s128(b"C"), s256(b"C")) aliases = {"tahoe": TA, "work": WA, "c": CA} def ga1(path): return get_alias(aliases, path, u"tahoe") uses_lettercolon = common.platform_uses_lettercolon_drivename() - self.failUnlessReallyEqual(ga1(u"bare"), (TA, "bare")) - self.failUnlessReallyEqual(ga1(u"baredir/file"), (TA, "baredir/file")) - self.failUnlessReallyEqual(ga1(u"baredir/file:7"), (TA, "baredir/file:7")) - self.failUnlessReallyEqual(ga1(u"tahoe:"), (TA, "")) - self.failUnlessReallyEqual(ga1(u"tahoe:file"), (TA, "file")) - self.failUnlessReallyEqual(ga1(u"tahoe:dir/file"), (TA, "dir/file")) - self.failUnlessReallyEqual(ga1(u"work:"), (WA, "")) - self.failUnlessReallyEqual(ga1(u"work:file"), (WA, "file")) - self.failUnlessReallyEqual(ga1(u"work:dir/file"), (WA, "dir/file")) + self.failUnlessReallyEqual(ga1(u"bare"), (TA, b"bare")) + self.failUnlessReallyEqual(ga1(u"baredir/file"), (TA, b"baredir/file")) + self.failUnlessReallyEqual(ga1(u"baredir/file:7"), (TA, b"baredir/file:7")) + self.failUnlessReallyEqual(ga1(u"tahoe:"), (TA, b"")) + 
self.failUnlessReallyEqual(ga1(u"tahoe:file"), (TA, b"file")) + self.failUnlessReallyEqual(ga1(u"tahoe:dir/file"), (TA, b"dir/file")) + self.failUnlessReallyEqual(ga1(u"work:"), (WA, b"")) + self.failUnlessReallyEqual(ga1(u"work:file"), (WA, b"file")) + self.failUnlessReallyEqual(ga1(u"work:dir/file"), (WA, b"dir/file")) # default != None means we really expect a tahoe path, regardless of # whether we're on windows or not. This is what 'tahoe get' uses. - self.failUnlessReallyEqual(ga1(u"c:"), (CA, "")) - self.failUnlessReallyEqual(ga1(u"c:file"), (CA, "file")) - self.failUnlessReallyEqual(ga1(u"c:dir/file"), (CA, "dir/file")) - self.failUnlessReallyEqual(ga1(u"URI:stuff"), ("URI:stuff", "")) - self.failUnlessReallyEqual(ga1(u"URI:stuff/file"), ("URI:stuff", "file")) - self.failUnlessReallyEqual(ga1(u"URI:stuff:./file"), ("URI:stuff", "file")) - self.failUnlessReallyEqual(ga1(u"URI:stuff/dir/file"), ("URI:stuff", "dir/file")) - self.failUnlessReallyEqual(ga1(u"URI:stuff:./dir/file"), ("URI:stuff", "dir/file")) + self.failUnlessReallyEqual(ga1(u"c:"), (CA, b"")) + self.failUnlessReallyEqual(ga1(u"c:file"), (CA, b"file")) + self.failUnlessReallyEqual(ga1(u"c:dir/file"), (CA, b"dir/file")) + self.failUnlessReallyEqual(ga1(u"URI:stuff"), (b"URI:stuff", b"")) + self.failUnlessReallyEqual(ga1(u"URI:stuff/file"), (b"URI:stuff", b"file")) + self.failUnlessReallyEqual(ga1(u"URI:stuff:./file"), (b"URI:stuff", b"file")) + self.failUnlessReallyEqual(ga1(u"URI:stuff/dir/file"), (b"URI:stuff", b"dir/file")) + self.failUnlessReallyEqual(ga1(u"URI:stuff:./dir/file"), (b"URI:stuff", b"dir/file")) self.failUnlessRaises(common.UnknownAliasError, ga1, u"missing:") self.failUnlessRaises(common.UnknownAliasError, ga1, u"missing:dir") self.failUnlessRaises(common.UnknownAliasError, ga1, u"missing:dir/file") def ga2(path): return get_alias(aliases, path, None) - self.failUnlessReallyEqual(ga2(u"bare"), (DefaultAliasMarker, "bare")) + self.failUnlessReallyEqual(ga2(u"bare"), (DefaultAliasMarker, b"bare")) self.failUnlessReallyEqual(ga2(u"baredir/file"), - (DefaultAliasMarker, "baredir/file")) + (DefaultAliasMarker, b"baredir/file")) self.failUnlessReallyEqual(ga2(u"baredir/file:7"), - (DefaultAliasMarker, "baredir/file:7")) + (DefaultAliasMarker, b"baredir/file:7")) self.failUnlessReallyEqual(ga2(u"baredir/sub:1/file:7"), - (DefaultAliasMarker, "baredir/sub:1/file:7")) - self.failUnlessReallyEqual(ga2(u"tahoe:"), (TA, "")) - self.failUnlessReallyEqual(ga2(u"tahoe:file"), (TA, "file")) - self.failUnlessReallyEqual(ga2(u"tahoe:dir/file"), (TA, "dir/file")) + (DefaultAliasMarker, b"baredir/sub:1/file:7")) + self.failUnlessReallyEqual(ga2(u"tahoe:"), (TA, b"")) + self.failUnlessReallyEqual(ga2(u"tahoe:file"), (TA, b"file")) + self.failUnlessReallyEqual(ga2(u"tahoe:dir/file"), (TA, b"dir/file")) # on windows, we really want c:foo to indicate a local file. # default==None is what 'tahoe cp' uses. 
if uses_lettercolon: - self.failUnlessReallyEqual(ga2(u"c:"), (DefaultAliasMarker, "c:")) - self.failUnlessReallyEqual(ga2(u"c:file"), (DefaultAliasMarker, "c:file")) + self.failUnlessReallyEqual(ga2(u"c:"), (DefaultAliasMarker, b"c:")) + self.failUnlessReallyEqual(ga2(u"c:file"), (DefaultAliasMarker, b"c:file")) self.failUnlessReallyEqual(ga2(u"c:dir/file"), - (DefaultAliasMarker, "c:dir/file")) + (DefaultAliasMarker, b"c:dir/file")) else: - self.failUnlessReallyEqual(ga2(u"c:"), (CA, "")) - self.failUnlessReallyEqual(ga2(u"c:file"), (CA, "file")) - self.failUnlessReallyEqual(ga2(u"c:dir/file"), (CA, "dir/file")) - self.failUnlessReallyEqual(ga2(u"work:"), (WA, "")) - self.failUnlessReallyEqual(ga2(u"work:file"), (WA, "file")) - self.failUnlessReallyEqual(ga2(u"work:dir/file"), (WA, "dir/file")) - self.failUnlessReallyEqual(ga2(u"URI:stuff"), ("URI:stuff", "")) - self.failUnlessReallyEqual(ga2(u"URI:stuff/file"), ("URI:stuff", "file")) - self.failUnlessReallyEqual(ga2(u"URI:stuff:./file"), ("URI:stuff", "file")) - self.failUnlessReallyEqual(ga2(u"URI:stuff/dir/file"), ("URI:stuff", "dir/file")) - self.failUnlessReallyEqual(ga2(u"URI:stuff:./dir/file"), ("URI:stuff", "dir/file")) + self.failUnlessReallyEqual(ga2(u"c:"), (CA, b"")) + self.failUnlessReallyEqual(ga2(u"c:file"), (CA, b"file")) + self.failUnlessReallyEqual(ga2(u"c:dir/file"), (CA, b"dir/file")) + self.failUnlessReallyEqual(ga2(u"work:"), (WA, b"")) + self.failUnlessReallyEqual(ga2(u"work:file"), (WA, b"file")) + self.failUnlessReallyEqual(ga2(u"work:dir/file"), (WA, b"dir/file")) + self.failUnlessReallyEqual(ga2(u"URI:stuff"), (b"URI:stuff", b"")) + self.failUnlessReallyEqual(ga2(u"URI:stuff/file"), (b"URI:stuff", b"file")) + self.failUnlessReallyEqual(ga2(u"URI:stuff:./file"), (b"URI:stuff", b"file")) + self.failUnlessReallyEqual(ga2(u"URI:stuff/dir/file"), (b"URI:stuff", b"dir/file")) + self.failUnlessReallyEqual(ga2(u"URI:stuff:./dir/file"), (b"URI:stuff", b"dir/file")) self.failUnlessRaises(common.UnknownAliasError, ga2, u"missing:") self.failUnlessRaises(common.UnknownAliasError, ga2, u"missing:dir") self.failUnlessRaises(common.UnknownAliasError, ga2, u"missing:dir/file") @@ -438,26 +454,26 @@ class CLI(CLITestMixin, unittest.TestCase): finally: common.pretend_platform_uses_lettercolon = old return retval - self.failUnlessReallyEqual(ga3(u"bare"), (DefaultAliasMarker, "bare")) + self.failUnlessReallyEqual(ga3(u"bare"), (DefaultAliasMarker, b"bare")) self.failUnlessReallyEqual(ga3(u"baredir/file"), - (DefaultAliasMarker, "baredir/file")) + (DefaultAliasMarker, b"baredir/file")) self.failUnlessReallyEqual(ga3(u"baredir/file:7"), - (DefaultAliasMarker, "baredir/file:7")) + (DefaultAliasMarker, b"baredir/file:7")) self.failUnlessReallyEqual(ga3(u"baredir/sub:1/file:7"), - (DefaultAliasMarker, "baredir/sub:1/file:7")) - self.failUnlessReallyEqual(ga3(u"tahoe:"), (TA, "")) - self.failUnlessReallyEqual(ga3(u"tahoe:file"), (TA, "file")) - self.failUnlessReallyEqual(ga3(u"tahoe:dir/file"), (TA, "dir/file")) - self.failUnlessReallyEqual(ga3(u"c:"), (DefaultAliasMarker, "c:")) - self.failUnlessReallyEqual(ga3(u"c:file"), (DefaultAliasMarker, "c:file")) + (DefaultAliasMarker, b"baredir/sub:1/file:7")) + self.failUnlessReallyEqual(ga3(u"tahoe:"), (TA, b"")) + self.failUnlessReallyEqual(ga3(u"tahoe:file"), (TA, b"file")) + self.failUnlessReallyEqual(ga3(u"tahoe:dir/file"), (TA, b"dir/file")) + self.failUnlessReallyEqual(ga3(u"c:"), (DefaultAliasMarker, b"c:")) + self.failUnlessReallyEqual(ga3(u"c:file"), (DefaultAliasMarker, 
b"c:file")) self.failUnlessReallyEqual(ga3(u"c:dir/file"), - (DefaultAliasMarker, "c:dir/file")) - self.failUnlessReallyEqual(ga3(u"work:"), (WA, "")) - self.failUnlessReallyEqual(ga3(u"work:file"), (WA, "file")) - self.failUnlessReallyEqual(ga3(u"work:dir/file"), (WA, "dir/file")) - self.failUnlessReallyEqual(ga3(u"URI:stuff"), ("URI:stuff", "")) - self.failUnlessReallyEqual(ga3(u"URI:stuff:./file"), ("URI:stuff", "file")) - self.failUnlessReallyEqual(ga3(u"URI:stuff:./dir/file"), ("URI:stuff", "dir/file")) + (DefaultAliasMarker, b"c:dir/file")) + self.failUnlessReallyEqual(ga3(u"work:"), (WA, b"")) + self.failUnlessReallyEqual(ga3(u"work:file"), (WA, b"file")) + self.failUnlessReallyEqual(ga3(u"work:dir/file"), (WA, b"dir/file")) + self.failUnlessReallyEqual(ga3(u"URI:stuff"), (b"URI:stuff", b"")) + self.failUnlessReallyEqual(ga3(u"URI:stuff:./file"), (b"URI:stuff", b"file")) + self.failUnlessReallyEqual(ga3(u"URI:stuff:./dir/file"), (b"URI:stuff", b"dir/file")) self.failUnlessRaises(common.UnknownAliasError, ga3, u"missing:") self.failUnlessRaises(common.UnknownAliasError, ga3, u"missing:dir") self.failUnlessRaises(common.UnknownAliasError, ga3, u"missing:dir/file") @@ -480,14 +496,14 @@ class CLI(CLITestMixin, unittest.TestCase): self.failUnlessRaises(common.UnknownAliasError, ga5, u"C:\\Windows") def test_alias_tolerance(self): - def s128(c): return base32.b2a(c*(128/8)) - def s256(c): return base32.b2a(c*(256/8)) - TA = "URI:DIR2:%s:%s" % (s128("T"), s256("T")) + def s128(c): return base32.b2a(c*(128//8)) + def s256(c): return base32.b2a(c*(256//8)) + TA = b"URI:DIR2:%s:%s" % (s128(b"T"), s256(b"T")) aliases = {"present": TA, - "future": "URI-FROM-FUTURE:ooh:aah"} + "future": b"URI-FROM-FUTURE:ooh:aah"} def ga1(path): return get_alias(aliases, path, u"tahoe") - self.failUnlessReallyEqual(ga1(u"present:file"), (TA, "file")) + self.failUnlessReallyEqual(ga1(u"present:file"), (TA, b"file")) # this throws, via assert IDirnodeURI.providedBy(), since get_alias() # wants a dirnode, and the future cap gives us UnknownURI instead. 
self.failUnlessRaises(AssertionError, ga1, u"future:stuff") @@ -502,9 +518,9 @@ class CLI(CLITestMixin, unittest.TestCase): fileutil.make_dirs(basedir) for name in filenames: - open(os.path.join(unicode(basedir), name), "wb").close() + open(os.path.join(str(basedir), name), "wb").close() - for file in listdir_unicode(unicode(basedir)): + for file in listdir_unicode(str(basedir)): self.failUnlessIn(normalize(file), filenames) def test_exception_catcher(self): @@ -671,7 +687,7 @@ class Ln(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) # Make sure that validation extends to the "to" parameter d.addCallback(lambda ign: self.do_cli("create-alias", "havasu")) @@ -718,8 +734,9 @@ class Admin(unittest.TestCase): self.failUnlessEqual(pubkey_bits[0], vk_header, lines[1]) self.failUnless(privkey_bits[1].startswith("priv-v0-"), lines[0]) self.failUnless(pubkey_bits[1].startswith("pub-v0-"), lines[1]) - sk, pk = ed25519.signing_keypair_from_string(privkey_bits[1]) - vk_bytes = pubkey_bits[1] + sk, pk = ed25519.signing_keypair_from_string( + privkey_bits[1].encode("ascii")) + vk_bytes = pubkey_bits[1].encode("ascii") self.assertEqual( ed25519.string_from_verifying_key(pk), vk_bytes, @@ -729,8 +746,8 @@ class Admin(unittest.TestCase): def test_derive_pubkey(self): priv_key, pub_key = ed25519.create_signing_keypair() - priv_key_str = ed25519.string_from_signing_key(priv_key) - pub_key_str = ed25519.string_from_verifying_key(pub_key) + priv_key_str = str(ed25519.string_from_signing_key(priv_key), "ascii") + pub_key_str = str(ed25519.string_from_verifying_key(pub_key), "ascii") d = run_cli("admin", "derive-pubkey", priv_key_str) def _done(args): (rc, stdout, stderr) = args @@ -753,11 +770,11 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase): self.set_up_grid() c0 = self.g.clients[0] self.fileurls = {} - DATA = "data" * 100 - d = c0.upload(upload.Data(DATA, convergence="")) + DATA = b"data" * 100 + d = c0.upload(upload.Data(DATA, convergence=b"")) def _stash_bad(ur): self.uri_1share = ur.get_uri() - self.delete_shares_numbered(ur.get_uri(), range(1,10)) + self.delete_shares_numbered(ur.get_uri(), list(range(1,10))) d.addCallback(_stash_bad) # the download is abandoned as soon as it's clear that we won't get @@ -821,7 +838,7 @@ class Get(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -836,7 +853,7 @@ class Get(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) self.failUnlessIn("nonexistent", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -853,7 +870,7 @@ class Manifest(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -868,7 +885,7 @@ class Manifest(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) self.failUnlessIn("nonexistent", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -883,7 +900,7 @@ class 
Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase): def _check(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") + self.assertEqual(err, "") self.failUnlessIn("URI:", out) d.addCallback(_check) @@ -896,7 +913,7 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase): def _check(args, st): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") + self.assertEqual(err, "") self.failUnlessIn(st, out) return out @@ -932,7 +949,7 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase): def _check(args, st): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") + self.assertEqual(err, "") self.failUnlessIn(st, out) return out d.addCallback(_check, "URI:DIR2") @@ -976,7 +993,7 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase): def _check(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") + self.assertEqual(err, "") self.failUnlessIn("URI:", out) d.addCallback(_check) @@ -992,7 +1009,7 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -1016,7 +1033,7 @@ class Unlink(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) d.addCallback(lambda ign: self.do_cli(self.command, "afile")) @@ -1034,7 +1051,7 @@ class Unlink(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) self.failUnlessIn("nonexistent", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) d.addCallback(lambda ign: self.do_cli(self.command, "nonexistent:afile")) @@ -1060,7 +1077,7 @@ class Unlink(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("'tahoe %s'" % (self.command,), err) self.failUnlessIn("path must be given", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -1081,7 +1098,7 @@ class Stats(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("stats", self.rooturi)) def _check_stats(args): (rc, out, err) = args - self.failUnlessReallyEqual(err, "") + self.assertEqual(err, "") self.failUnlessReallyEqual(rc, 0) lines = out.splitlines() self.failUnlessIn(" count-immutable-files: 0", lines) @@ -1105,7 +1122,7 @@ class Stats(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -1119,7 +1136,7 @@ class Stats(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -1136,7 +1153,7 @@ class Webopen(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(out, "") d.addCallback(_check) return d @@ -1144,7 +1161,7 @@ class 
Webopen(GridTestMixin, CLITestMixin, unittest.TestCase): # TODO: replace with @patch that supports Deferreds. import webbrowser def call_webbrowser_open(url): - self.failUnlessIn(self.alias_uri.replace(':', '%3A'), url) + self.failUnlessIn(str(self.alias_uri, "ascii").replace(':', '%3A'), url) self.webbrowser_open_called = True def _cleanup(res): webbrowser.open = self.old_webbrowser_open @@ -1161,15 +1178,15 @@ class Webopen(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0, repr((rc, out, err))) self.failUnlessIn("Alias 'alias' created", out) - self.failUnlessReallyEqual(err, "") + self.assertEqual(err, "") self.alias_uri = get_aliases(self.get_clientdir())["alias"] d.addCallback(_check_alias) d.addCallback(lambda res: self.do_cli("webopen", "alias:")) def _check_webopen(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0, repr((rc, out, err))) - self.failUnlessReallyEqual(out, "") - self.failUnlessReallyEqual(err, "") + self.assertEqual(out, "") + self.assertEqual(err, "") self.failUnless(self.webbrowser_open_called) d.addCallback(_check_webopen) d.addBoth(_cleanup) @@ -1195,31 +1212,31 @@ class Options(ReallyEqualMixin, unittest.TestCase): fileutil.make_dirs("cli/test_options") fileutil.make_dirs("cli/test_options/private") fileutil.write("cli/test_options/node.url", "http://localhost:8080/\n") - filenode_uri = uri.WriteableSSKFileURI(writekey="\x00"*16, - fingerprint="\x00"*32) + filenode_uri = uri.WriteableSSKFileURI(writekey=b"\x00"*16, + fingerprint=b"\x00"*32) private_uri = uri.DirectoryURI(filenode_uri).to_string() - fileutil.write("cli/test_options/private/root_dir.cap", private_uri + "\n") + fileutil.write("cli/test_options/private/root_dir.cap", private_uri + b"\n") def parse2(args): return parse_options("cli/test_options", "ls", args) o = parse2([]) self.failUnlessEqual(o['node-url'], "http://localhost:8080/") - self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri) + self.failUnlessEqual(o.aliases[DEFAULT_ALIAS].encode("ascii"), private_uri) self.failUnlessEqual(o.where, u"") o = parse2(["--node-url", "http://example.org:8111/"]) self.failUnlessEqual(o['node-url'], "http://example.org:8111/") - self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri) + self.failUnlessEqual(o.aliases[DEFAULT_ALIAS].encode("ascii"), private_uri) self.failUnlessEqual(o.where, u"") # -u for --node-url used to clash with -u for --uri (tickets #1949 and #2137). 
o = parse2(["-u", "http://example.org:8111/"]) self.failUnlessEqual(o['node-url'], "http://example.org:8111/") - self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri) + self.failUnlessEqual(o.aliases[DEFAULT_ALIAS].encode("ascii"), private_uri) self.failUnlessEqual(o.where, u"") self.failIf(o["uri"]) o = parse2(["-u", "http://example.org:8111/", "--uri"]) self.failUnlessEqual(o['node-url'], "http://example.org:8111/") - self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri) + self.failUnlessEqual(o.aliases[DEFAULT_ALIAS].encode("ascii"), private_uri) self.failUnlessEqual(o.where, u"") self.failUnless(o["uri"]) @@ -1228,17 +1245,17 @@ class Options(ReallyEqualMixin, unittest.TestCase): self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root") self.failUnlessEqual(o.where, u"") - other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16, - fingerprint="\x11"*32) + other_filenode_uri = uri.WriteableSSKFileURI(writekey=b"\x11"*16, + fingerprint=b"\x11"*32) other_uri = uri.DirectoryURI(other_filenode_uri).to_string() o = parse2(["--dir-cap", other_uri]) self.failUnlessEqual(o['node-url'], "http://localhost:8080/") - self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri) + self.failUnlessEqual(o.aliases[DEFAULT_ALIAS].encode("ascii"), other_uri) self.failUnlessEqual(o.where, u"") o = parse2(["--dir-cap", other_uri, "subdir"]) self.failUnlessEqual(o['node-url'], "http://localhost:8080/") - self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri) + self.failUnlessEqual(o.aliases[DEFAULT_ALIAS].encode("ascii"), other_uri) self.failUnlessEqual(o.where, u"subdir") self.failUnlessRaises(usage.UsageError, parse2, @@ -1325,7 +1342,7 @@ class Run(unittest.TestCase): If the pidfile exists but does not contain a numeric value, a complaint to this effect is written to stderr. """ - basedir = FilePath(self.mktemp().decode("ascii")) + basedir = FilePath(ensure_str(self.mktemp())) basedir.makedirs() basedir.child(u"twistd.pid").setContent(b"foo") basedir.child(u"tahoe-client.tac").setContent(b"") @@ -1333,7 +1350,7 @@ class Run(unittest.TestCase): config = tahoe_run.RunOptions() config.stdout = StringIO() config.stderr = StringIO() - config['basedir'] = basedir.path + config['basedir'] = ensure_text(basedir.path) config.twistd_args = [] result_code = tahoe_run.run(config) diff --git a/src/allmydata/test/cli/test_cp.py b/src/allmydata/test/cli/test_cp.py index 6cebec4a5..d198a832c 100644 --- a/src/allmydata/test/cli/test_cp.py +++ b/src/allmydata/test/cli/test_cp.py @@ -1,4 +1,14 @@ +""" +Ported to Python 3. 
+""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path, json from twisted.trial import unittest @@ -24,12 +34,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): def test_unicode_filename(self): self.basedir = "cli/Cp/unicode_filename" - fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall") - try: - fn1_arg = fn1.encode(get_io_encoding()) - artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding()) - except UnicodeEncodeError: - raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.") + fn1 = os.path.join(self.basedir, u"\u00C4rtonwall") + artonwall_arg = u"\u00C4rtonwall" skip_if_cannot_represent_filename(fn1) @@ -44,15 +50,15 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): d = self.do_cli("create-alias", "tahoe") - d.addCallback(lambda res: self.do_cli("cp", fn1_arg, "tahoe:")) + d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:")) d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg)) - d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA1)) + d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA1)) d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:")) d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica")) - d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA2)) + d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA2)) d.addCallback(lambda res: self.do_cli("ls", "tahoe:")) def _check(args): @@ -66,8 +72,10 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessIn("files whose names could not be converted", err) else: self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"Metallica\n\u00C4rtonwall\n") - self.failUnlessReallyEqual(err, "") + if PY2: + out = out.decode(get_io_encoding()) + self.failUnlessReallyEqual(out, u"Metallica\n\u00C4rtonwall\n") + self.assertEqual(len(err), 0, err) d.addCallback(_check) return d @@ -98,7 +106,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): fn1 = os.path.join(self.basedir, "Metallica") fn2 = os.path.join(outdir, "Not Metallica") fn3 = os.path.join(outdir, "test2") - DATA1 = "puppies" * 10000 + DATA1 = b"puppies" * 10000 fileutil.write(fn1, DATA1) d = self.do_cli("create-alias", "tahoe") @@ -128,7 +136,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("when copying into a directory, all source files must have names, but", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_resp) # Create a directory, linked at tahoe:test . 
@@ -200,13 +208,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): def test_unicode_dirnames(self): self.basedir = "cli/Cp/unicode_dirnames" - fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall") - try: - fn1_arg = fn1.encode(get_io_encoding()) - del fn1_arg # hush pyflakes - artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding()) - except UnicodeEncodeError: - raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.") + fn1 = os.path.join(self.basedir, u"\u00C4rtonwall") + artonwall_arg = u"\u00C4rtonwall" skip_if_cannot_represent_filename(fn1) @@ -222,13 +225,15 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): unicode_to_output(u"\u00C4rtonwall") except UnicodeEncodeError: self.failUnlessReallyEqual(rc, 1) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err) self.failUnlessIn("files whose names could not be converted", err) else: self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"\u00C4rtonwall\n") - self.failUnlessReallyEqual(err, "") + if PY2: + out = out.decode(get_io_encoding()) + self.failUnlessReallyEqual(out, u"\u00C4rtonwall\n") + self.assertEqual(len(err), 0, err) d.addCallback(_check) return d @@ -818,9 +823,9 @@ cp -r $DIRCAP5 $DIRCAP6 to : E9-COLLIDING-TARGETS """ class CopyOut(GridTestMixin, CLITestMixin, unittest.TestCase): - FILE_CONTENTS = "file text" - FILE_CONTENTS_5 = "5" - FILE_CONTENTS_6 = "6" + FILE_CONTENTS = b"file text" + FILE_CONTENTS_5 = b"5" + FILE_CONTENTS_6 = b"6" def do_setup(self): # first we build a tahoe filesystem that contains: diff --git a/src/allmydata/test/cli/test_create_alias.py b/src/allmydata/test/cli/test_create_alias.py index ea3200e2e..4a252f372 100644 --- a/src/allmydata/test/cli/test_create_alias.py +++ b/src/allmydata/test/cli/test_create_alias.py @@ -1,12 +1,26 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from six import ensure_str from six.moves import StringIO import os.path from twisted.trial import unittest -import urllib +from urllib.parse import quote as url_quote + from allmydata.util import fileutil from allmydata.scripts.common import get_aliases from allmydata.scripts import cli, runner from ..no_network import GridTestMixin -from allmydata.util.encodingutil import quote_output, get_io_encoding +from allmydata.util.encodingutil import quote_output from .common import CLITestMixin class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase): @@ -22,7 +36,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase): rc = cli.webopen(o.subOptions, urls.append) self.failUnlessReallyEqual(rc, 0) self.failUnlessReallyEqual(len(urls), 1) - self.failUnlessReallyEqual(urls[0], expected_url) + self.assertEqual(urls[0], expected_url) def test_create(self): self.basedir = "cli/CreateAlias/create" @@ -36,19 +50,19 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase): self.assertIn("Alias 'tahoe' created", stdout) aliases = get_aliases(self.get_clientdir()) self.failUnless("tahoe" in aliases) - self.failUnless(aliases["tahoe"].startswith("URI:DIR2:")) + self.failUnless(aliases["tahoe"].startswith(b"URI:DIR2:")) d.addCallback(_done) d.addCallback(lambda res: self.do_cli("create-alias", "two:")) def _stash_urls(res): aliases = get_aliases(self.get_clientdir()) node_url_file = os.path.join(self.get_clientdir(), "node.url") - nodeurl = fileutil.read(node_url_file).strip() + nodeurl = fileutil.read(node_url_file, mode="r").strip() self.welcome_url = nodeurl uribase = nodeurl + "uri/" - self.tahoe_url = uribase + urllib.quote(aliases["tahoe"]) + self.tahoe_url = uribase + url_quote(aliases["tahoe"]) self.tahoe_subdir_url = self.tahoe_url + "/subdir" - self.two_url = uribase + urllib.quote(aliases["two"]) + self.two_url = uribase + url_quote(aliases["two"]) self.two_uri = aliases["two"] d.addCallback(_stash_urls) @@ -128,13 +142,13 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase): # like a valid dircap, so get_aliases() will raise an exception. aliases = get_aliases(self.get_clientdir()) self.failUnless("added" in aliases) - self.failUnless(aliases["added"].startswith("URI:DIR2:")) + self.failUnless(aliases["added"].startswith(b"URI:DIR2:")) # to be safe, let's confirm that we don't see "NAME2:" in CAP1. # No chance of a false-negative, because the hyphen in # "un-corrupted1" is not a valid base32 character. 
- self.failIfIn("un-corrupted1:", aliases["added"]) + self.failIfIn(b"un-corrupted1:", aliases["added"]) self.failUnless("un-corrupted1" in aliases) - self.failUnless(aliases["un-corrupted1"].startswith("URI:DIR2:")) + self.failUnless(aliases["un-corrupted1"].startswith(b"URI:DIR2:")) d.addCallback(_check_not_corrupted1) def _remove_trailing_newline_and_add_alias(ign): @@ -149,10 +163,10 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase): self.failIf(stderr) aliases = get_aliases(self.get_clientdir()) self.failUnless("un-corrupted1" in aliases) - self.failUnless(aliases["un-corrupted1"].startswith("URI:DIR2:")) - self.failIfIn("un-corrupted2:", aliases["un-corrupted1"]) + self.failUnless(aliases["un-corrupted1"].startswith(b"URI:DIR2:")) + self.failIfIn(b"un-corrupted2:", aliases["un-corrupted1"]) self.failUnless("un-corrupted2" in aliases) - self.failUnless(aliases["un-corrupted2"].startswith("URI:DIR2:")) + self.failUnless(aliases["un-corrupted2"].startswith(b"URI:DIR2:")) d.addCallback(_check_not_corrupted) return d @@ -160,61 +174,62 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase): self.basedir = "cli/CreateAlias/create_unicode" self.set_up_grid(oneshare=True) - try: - etudes_arg = u"\u00E9tudes".encode(get_io_encoding()) - lumiere_arg = u"lumi\u00E8re.txt".encode(get_io_encoding()) - except UnicodeEncodeError: - raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.") + etudes_arg = u"\u00E9tudes" + lumiere_arg = u"lumi\u00E8re.txt" d = self.do_cli("create-alias", etudes_arg) def _check_create_unicode(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") - self.failUnlessIn("Alias %s created" % quote_output(u"\u00E9tudes"), out) + self.assertEqual(len(err), 0, err) + self.failUnlessIn(ensure_str("Alias %s created") % quote_output(etudes_arg), out) aliases = get_aliases(self.get_clientdir()) - self.failUnless(aliases[u"\u00E9tudes"].startswith("URI:DIR2:")) + self.failUnless(aliases[u"\u00E9tudes"].startswith(b"URI:DIR2:")) d.addCallback(_check_create_unicode) d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":")) def _check_ls1(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(err), 0, err) + self.assertEqual(len(out), 0, out) d.addCallback(_check_ls1) + DATA = b"Blah blah blah \xff blah \x00 blah" d.addCallback(lambda res: self.do_cli("put", "-", etudes_arg + ":uploaded.txt", - stdin="Blah blah blah")) + stdin=DATA)) d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":")) def _check_ls2(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") - self.failUnlessReallyEqual(out, "uploaded.txt\n") + self.assertEqual(len(err), 0, err) + self.assertEqual(out, "uploaded.txt\n") d.addCallback(_check_ls2) - d.addCallback(lambda res: self.do_cli("get", etudes_arg + ":uploaded.txt")) + d.addCallback(lambda res: self.do_cli("get", etudes_arg + ":uploaded.txt", + return_bytes=True)) def _check_get(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") - self.failUnlessReallyEqual(out, "Blah blah blah") + self.assertEqual(len(err), 0, err) + self.failUnlessReallyEqual(out, DATA) d.addCallback(_check_get) # Ensure that an Unicode filename in an Unicode alias works as expected d.addCallback(lambda res: self.do_cli("put", "-", etudes_arg + ":" + lumiere_arg, - 
stdin="Let the sunshine In!")) + stdin=b"Let the sunshine In!")) - d.addCallback(lambda res: self.do_cli("get", - get_aliases(self.get_clientdir())[u"\u00E9tudes"] + "/" + lumiere_arg)) + d.addCallback(lambda res: self.do_cli( + "get", + str(get_aliases(self.get_clientdir())[u"\u00E9tudes"], "ascii") + "/" + lumiere_arg, + return_bytes=True)) def _check_get2(args): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") - self.failUnlessReallyEqual(out, "Let the sunshine In!") + self.assertEqual(len(err), 0, err) + self.failUnlessReallyEqual(out, b"Let the sunshine In!") d.addCallback(_check_get2) return d diff --git a/src/allmydata/test/cli/test_list.py b/src/allmydata/test/cli/test_list.py index fff57cdc9..1206579f1 100644 --- a/src/allmydata/test/cli/test_list.py +++ b/src/allmydata/test/cli/test_list.py @@ -1,3 +1,16 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2, PY3 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_str + from twisted.trial import unittest from twisted.internet import defer @@ -8,61 +21,52 @@ from ..no_network import GridTestMixin from allmydata.util.encodingutil import quote_output, get_io_encoding from .common import CLITestMixin + class List(GridTestMixin, CLITestMixin, unittest.TestCase): def test_list(self): self.basedir = "cli/List/list" self.set_up_grid() c0 = self.g.clients[0] - small = "small" + small = b"small" - # u"g\u00F6\u00F6d" might not be representable in the argv and/or output encodings. - # It is initially included in the directory in any case. - try: - good_arg = u"g\u00F6\u00F6d".encode(get_io_encoding()) - except UnicodeEncodeError: - good_arg = None + good_arg = u"g\u00F6\u00F6d" + good_out = u"g\u00F6\u00F6d" - try: - good_out = u"g\u00F6\u00F6d".encode(get_io_encoding()) - except UnicodeEncodeError: - good_out = None + # On Python 2 we get bytes, so we need encoded version. On Python 3 + # stdio is unicode so can leave unchanged. 
+ good_out_encoded = good_out if PY3 else good_out.encode(get_io_encoding()) d = c0.create_dirnode() def _stash_root_and_create_file(n): self.rootnode = n - self.rooturi = n.get_uri() - return n.add_file(u"g\u00F6\u00F6d", upload.Data(small, convergence="")) + self.rooturi = str(n.get_uri(), "utf-8") + return n.add_file(u"g\u00F6\u00F6d", upload.Data(small, convergence=b"")) d.addCallback(_stash_root_and_create_file) def _stash_goodcap(n): self.goodcap = n.get_uri() d.addCallback(_stash_goodcap) d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"1share")) d.addCallback(lambda n: - self.delete_shares_numbered(n.get_uri(), range(1,10))) + self.delete_shares_numbered(n.get_uri(), list(range(1,10)))) d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"0share")) d.addCallback(lambda n: - self.delete_shares_numbered(n.get_uri(), range(0,10))) + self.delete_shares_numbered(n.get_uri(), list(range(0,10)))) d.addCallback(lambda ign: self.do_cli("add-alias", "tahoe", self.rooturi)) d.addCallback(lambda ign: self.do_cli("ls")) def _check1(args): (rc, out, err) = args - if good_out is None: - self.failUnlessReallyEqual(rc, 1) - self.failUnlessIn("files whose names could not be converted", err) - self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err) - self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share"])) - else: - self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") - self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share", good_out])) + self.failUnlessReallyEqual(rc, 0) + self.assertEqual(len(err), 0, err) + expected = sorted([ensure_str("0share"), ensure_str("1share"), good_out_encoded]) + self.assertEqual(sorted(out.splitlines()), expected) d.addCallback(_check1) d.addCallback(lambda ign: self.do_cli("ls", "missing")) def _check2(args): (rc, out, err) = args self.failIfEqual(rc, 0) - self.failUnlessReallyEqual(err.strip(), "No such file or directory") - self.failUnlessReallyEqual(out, "") + self.assertEqual(err.strip(), "No such file or directory") + self.assertEqual(len(out), 0, out) d.addCallback(_check2) d.addCallback(lambda ign: self.do_cli("ls", "1share")) def _check3(args): @@ -72,7 +76,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessIn("UnrecoverableFileError:", err) self.failUnlessIn("could not be retrieved, because there were " "insufficient good shares.", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check3) d.addCallback(lambda ign: self.do_cli("ls", "0share")) d.addCallback(_check3) @@ -82,13 +86,13 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("files whose names could not be converted", err) self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) else: # listing a file (as dir/filename) should have the edge metadata, # including the filename self.failUnlessReallyEqual(rc, 0) - self.failUnlessIn(good_out, out) - self.failIfIn("-r-- %d -" % len(small), out, + self.failUnlessIn(good_out_encoded, out) + self.failIfIn(ensure_str("-r-- %d -" % len(small)), out, "trailing hyphen means unknown date") if good_arg is not None: @@ -106,7 +110,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): # metadata, just the size (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual("-r-- %d -" % len(small), out.strip()) + self.assertEqual("-r-- %d -" 
% len(small), out.strip()) d.addCallback(lambda ign: self.do_cli("ls", "-l", self.goodcap)) d.addCallback(_check5) @@ -118,7 +122,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): def _check1_ascii(args): (rc,out,err) = args self.failUnlessReallyEqual(rc, 0) - self.failUnlessReallyEqual(err, "") + self.assertEqual(len(err), 0, err) self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share", "good"])) d.addCallback(_check1_ascii) def _check4_ascii(args): @@ -139,7 +143,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + ":./good")) d.addCallback(_check4_ascii) - unknown_immcap = "imm.URI:unknown" + unknown_immcap = b"imm.URI:unknown" def _create_unknown(ign): nm = c0.nodemaker kids = {u"unknownchild-imm": (nm.create_from_cap(unknown_immcap), {})} @@ -178,7 +182,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check) return d @@ -193,7 +197,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) self.failUnlessIn("nonexistent", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check) return d @@ -226,8 +230,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): # The uploaders may run at the same time, so we need two # MutableData instances or they'll fight over offsets &c and # break. - mutable_data = MutableData("data" * 100000) - mutable_data2 = MutableData("data" * 100000) + mutable_data = MutableData(b"data" * 100000) + mutable_data2 = MutableData(b"data" * 100000) # Add both kinds of mutable node. d1 = nm.create_mutable_file(mutable_data, version=MDMF_VERSION) @@ -235,8 +239,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): version=SDMF_VERSION) # Add an immutable node. We do this through the directory, # with add_file. - immutable_data = upload.Data("immutable data" * 100000, - convergence="") + immutable_data = upload.Data(b"immutable data" * 100000, + convergence=b"") d3 = n.add_file(u"immutable", immutable_data) ds = [d1, d2, d3] dl = defer.DeferredList(ds) @@ -294,12 +298,12 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): def _got_json(args): (rc, out, err) = args self.failUnlessEqual(rc, 0) - self.failUnlessEqual(err, "") - self.failUnlessIn(self._mdmf_uri, out) - self.failUnlessIn(self._mdmf_readonly_uri, out) - self.failUnlessIn(self._sdmf_uri, out) - self.failUnlessIn(self._sdmf_readonly_uri, out) - self.failUnlessIn(self._imm_uri, out) + self.assertEqual(len(err), 0, err) + self.failUnlessIn(str(self._mdmf_uri, "ascii"), out) + self.failUnlessIn(str(self._mdmf_readonly_uri, "ascii"), out) + self.failUnlessIn(str(self._sdmf_uri, "ascii"), out) + self.failUnlessIn(str(self._sdmf_readonly_uri, "ascii"), out) + self.failUnlessIn(str(self._imm_uri, "ascii"), out) self.failUnlessIn('"format": "SDMF"', out) self.failUnlessIn('"format": "MDMF"', out) d.addCallback(_got_json) diff --git a/src/allmydata/test/cli/test_mv.py b/src/allmydata/test/cli/test_mv.py index 9d1a64974..0bb9ba369 100644 --- a/src/allmydata/test/cli/test_mv.py +++ b/src/allmydata/test/cli/test_mv.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import os.path from twisted.trial import unittest from allmydata.util import fileutil @@ -5,15 +17,16 @@ from ..no_network import GridTestMixin from allmydata.scripts import tahoe_mv from .common import CLITestMixin + class Mv(GridTestMixin, CLITestMixin, unittest.TestCase): def test_mv_behavior(self): self.basedir = "cli/Mv/mv_behavior" self.set_up_grid(oneshare=True) fn1 = os.path.join(self.basedir, "file1") - DATA1 = "Nuclear launch codes" + DATA1 = b"Nuclear launch codes" fileutil.write(fn1, DATA1) fn2 = os.path.join(self.basedir, "file2") - DATA2 = "UML diagrams" + DATA2 = b"UML diagrams" fileutil.write(fn2, DATA2) # copy both files to the grid d = self.do_cli("create-alias", "tahoe") @@ -104,11 +117,11 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase): self.basedir = "cli/Mv/mv_error_if_DELETE_fails" self.set_up_grid(oneshare=True) fn1 = os.path.join(self.basedir, "file1") - DATA1 = "Nuclear launch codes" + DATA1 = b"Nuclear launch codes" fileutil.write(fn1, DATA1) original_do_http = tahoe_mv.do_http - def mock_do_http(method, url, body=""): + def mock_do_http(method, url, body=b""): if method == "DELETE": class FakeResponse(object): def read(self): @@ -152,7 +165,7 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check) # check to see that the validation extends to the # target argument by making an alias that will work with the first @@ -180,7 +193,7 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) self.failUnlessIn("fake", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check) # check to see that the validation extends to the # target argument by making an alias that will work with the first diff --git a/src/allmydata/test/cli/test_put.py b/src/allmydata/test/cli/test_put.py index 3392e67b4..c6a577074 100644 --- a/src/allmydata/test/cli/test_put.py +++ b/src/allmydata/test/cli/test_put.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import os.path from twisted.trial import unittest from twisted.python import usage @@ -17,7 +29,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): # tahoe get `echo DATA | tahoe put` # tahoe get `echo DATA | tahoe put -` self.basedir = "cli/Put/unlinked_immutable_stdin" - DATA = "data" * 100 + DATA = b"data\xff" * 100 self.set_up_grid(oneshare=True) d = self.do_cli("put", stdin=DATA) def _uploaded(res): @@ -27,10 +39,11 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.readcap = out self.failUnless(self.readcap.startswith("URI:CHK:")) d.addCallback(_uploaded) - d.addCallback(lambda res: self.do_cli("get", self.readcap)) + d.addCallback(lambda res: self.do_cli("get", self.readcap, + return_bytes=True)) def _downloaded(res): (rc, out, err) = res - self.failUnlessReallyEqual(err, "") + self.failUnlessReallyEqual(err, b"") self.failUnlessReallyEqual(out, DATA) d.addCallback(_downloaded) d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA)) @@ -46,10 +59,10 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.basedir = "cli/Put/unlinked_immutable_from_file" self.set_up_grid(oneshare=True) - rel_fn = unicode(os.path.join(self.basedir, "DATAFILE")) + rel_fn = str(os.path.join(self.basedir, "DATAFILE")) abs_fn = abspath_expanduser_unicode(rel_fn) # we make the file small enough to fit in a LIT file, for speed - fileutil.write(rel_fn, "short file") + fileutil.write(rel_fn, b"short file has some bytes \xff yes") d = self.do_cli_unicode(u"put", [rel_fn]) def _uploaded(args): (rc, out, err) = args @@ -79,8 +92,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): rel_fn = os.path.join(self.basedir, "DATAFILE") # we make the file small enough to fit in a LIT file, for speed - DATA = "short file" - DATA2 = "short file two" + DATA = b"short file" + DATA2 = b"short file two" fileutil.write(rel_fn, DATA) d = self.do_cli("create-alias", "tahoe") @@ -95,7 +108,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.readcap = readcap d.addCallback(_uploaded) d.addCallback(lambda res: - self.do_cli("get", "tahoe:uploaded.txt")) + self.do_cli("get", "tahoe:uploaded.txt", + return_bytes=True)) d.addCallback(lambda rc_stdout_stderr: self.failUnlessReallyEqual(rc_stdout_stderr[1], DATA)) @@ -110,32 +124,36 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(lambda res: self.do_cli("put", rel_fn, "subdir/uploaded2.txt")) - d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt")) + d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt", + return_bytes=True)) d.addCallback(lambda rc_stdout_stderr: self.failUnlessReallyEqual(rc_stdout_stderr[1], DATA)) d.addCallback(lambda res: self.do_cli("put", rel_fn, "tahoe:uploaded3.txt")) - d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt")) + d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt", + return_bytes=True)) d.addCallback(lambda rc_stdout_stderr: self.failUnlessReallyEqual(rc_stdout_stderr[1], DATA)) d.addCallback(lambda res: self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt")) d.addCallback(lambda res: - self.do_cli("get", 
"tahoe:subdir/uploaded4.txt")) + self.do_cli("get", "tahoe:subdir/uploaded4.txt", + return_bytes=True)) d.addCallback(lambda rc_stdout_stderr: self.failUnlessReallyEqual(rc_stdout_stderr[1], DATA)) def _get_dircap(res): - self.dircap = get_aliases(self.get_clientdir())["tahoe"] + self.dircap = str(get_aliases(self.get_clientdir())["tahoe"], "ascii") d.addCallback(_get_dircap) d.addCallback(lambda res: self.do_cli("put", rel_fn, self.dircap+":./uploaded5.txt")) d.addCallback(lambda res: - self.do_cli("get", "tahoe:uploaded5.txt")) + self.do_cli("get", "tahoe:uploaded5.txt", + return_bytes=True)) d.addCallback(lambda rc_stdout_stderr: self.failUnlessReallyEqual(rc_stdout_stderr[1], DATA)) @@ -143,7 +161,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.do_cli("put", rel_fn, self.dircap+":./subdir/uploaded6.txt")) d.addCallback(lambda res: - self.do_cli("get", "tahoe:subdir/uploaded6.txt")) + self.do_cli("get", "tahoe:subdir/uploaded6.txt", + return_bytes=True)) d.addCallback(lambda rc_stdout_stderr: self.failUnlessReallyEqual(rc_stdout_stderr[1], DATA)) @@ -158,10 +177,10 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.basedir = "cli/Put/mutable_unlinked" self.set_up_grid(oneshare=True) - DATA = "data" * 100 - DATA2 = "two" * 100 + DATA = b"data" * 100 + DATA2 = b"two" * 100 rel_fn = os.path.join(self.basedir, "DATAFILE") - DATA3 = "three" * 100 + DATA3 = b"three" * 100 fileutil.write(rel_fn, DATA3) d = self.do_cli("put", "--mutable", stdin=DATA) @@ -172,7 +191,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.filecap = out self.failUnless(self.filecap.startswith("URI:SSK:"), self.filecap) d.addCallback(_created) - d.addCallback(lambda res: self.do_cli("get", self.filecap)) + d.addCallback(lambda res: self.do_cli("get", self.filecap, return_bytes=True)) d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA)) d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2)) @@ -182,7 +201,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessIn("200 OK", err) self.failUnlessReallyEqual(self.filecap, out) d.addCallback(_replaced) - d.addCallback(lambda res: self.do_cli("get", self.filecap)) + d.addCallback(lambda res: self.do_cli("get", self.filecap, return_bytes=True)) d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA2)) d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap)) @@ -191,7 +210,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessIn("200 OK", err) self.failUnlessReallyEqual(self.filecap, out) d.addCallback(_replaced2) - d.addCallback(lambda res: self.do_cli("get", self.filecap)) + d.addCallback(lambda res: self.do_cli("get", self.filecap, return_bytes=True)) d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA3)) return d @@ -204,10 +223,10 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.basedir = "cli/Put/mutable" self.set_up_grid(oneshare=True) - DATA1 = "data" * 100 + DATA1 = b"data" * 100 fn1 = os.path.join(self.basedir, "DATA1") fileutil.write(fn1, DATA1) - DATA2 = "two" * 100 + DATA2 = b"two\xff" * 100 fn2 = os.path.join(self.basedir, "DATA2") fileutil.write(fn2, DATA2) @@ -229,7 +248,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessEqual(out, self.uri, str(res)) d.addCallback(_check2) d.addCallback(lambda res: - self.do_cli("get", "tahoe:uploaded.txt")) + self.do_cli("get", "tahoe:uploaded.txt", 
return_bytes=True)) d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA2)) return d @@ -429,26 +448,23 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 1) self.failUnlessIn("error:", err) - self.failUnlessReallyEqual(out, "") + self.assertEqual(len(out), 0, out) d.addCallback(_check) return d def test_immutable_from_file_unicode(self): # tahoe put "\u00E0 trier.txt" "\u00E0 trier.txt" - try: - a_trier_arg = u"\u00E0 trier.txt".encode(get_io_encoding()) - except UnicodeEncodeError: - raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.") + a_trier_arg = u"\u00E0 trier.txt" skip_if_cannot_represent_filename(u"\u00E0 trier.txt") self.basedir = "cli/Put/immutable_from_file_unicode" self.set_up_grid(oneshare=True) - rel_fn = os.path.join(unicode(self.basedir), u"\u00E0 trier.txt") + rel_fn = os.path.join(str(self.basedir), u"\u00E0 trier.txt") # we make the file small enough to fit in a LIT file, for speed - DATA = "short file" + DATA = b"short file \xff bytes" fileutil.write(rel_fn, DATA) d = self.do_cli("create-alias", "tahoe") @@ -464,7 +480,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): d.addCallback(_uploaded) d.addCallback(lambda res: - self.do_cli("get", "tahoe:" + a_trier_arg)) + self.do_cli("get", "tahoe:" + a_trier_arg, + return_bytes=True)) d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA)) diff --git a/src/allmydata/test/cli/test_run.py b/src/allmydata/test/cli/test_run.py index d27791f34..6100d2568 100644 --- a/src/allmydata/test/cli/test_run.py +++ b/src/allmydata/test/cli/test_run.py @@ -1,6 +1,16 @@ """ Tests for ``allmydata.scripts.tahoe_run``. + +Ported to Python 3. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six.moves import ( StringIO, @@ -50,7 +60,7 @@ class DaemonizeTheRealServiceTests(SyncTestCase): """ nodedir = FilePath(self.mktemp()) nodedir.makedirs() - nodedir.child("tahoe.cfg").setContent(config) + nodedir.child("tahoe.cfg").setContent(config.encode("ascii")) nodedir.child("tahoe-client.tac").touch() options = parse_options(["run", nodedir.path]) diff --git a/src/allmydata/test/cli_node_api.py b/src/allmydata/test/cli_node_api.py index 4e4173924..be0381e11 100644 --- a/src/allmydata/test/cli_node_api.py +++ b/src/allmydata/test/cli_node_api.py @@ -154,6 +154,7 @@ class CLINodeAPI(object): exe = sys.executable argv = [ exe, + "-b", u"-m", u"allmydata.scripts.runner", ] + argv diff --git a/src/allmydata/test/common_util.py b/src/allmydata/test/common_util.py index 16f945239..caafbb81d 100644 --- a/src/allmydata/test/common_util.py +++ b/src/allmydata/test/common_util.py @@ -6,7 +6,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals -from future.utils import PY2, bchr, binary_type +from future.utils import PY2, PY3, bchr, binary_type from future.builtins import str as future_str if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401 @@ -15,7 +15,8 @@ import os import time import signal from random import randrange -from six.moves import StringIO +if PY2: + from StringIO import StringIO from io import ( TextIOWrapper, BytesIO, @@ -64,23 +65,28 @@ def run_cli_native(verb, *args, **kwargs): Most code should prefer ``run_cli_unicode`` which deals with all the necessary encoding considerations. - :param native_str verb: The command to run. For example, ``"create-node"``. + :param native_str verb: The command to run. For example, + ``"create-node"``. - :param [native_str] args: The arguments to pass to the command. For example, - ``("--hostname=localhost",)``. + :param [native_str] args: The arguments to pass to the command. For + example, ``("--hostname=localhost",)``. - :param [native_str] nodeargs: Extra arguments to pass to the Tahoe executable - before ``verb``. + :param [native_str] nodeargs: Extra arguments to pass to the Tahoe + executable before ``verb``. - :param native_str stdin: Text to pass to the command via stdin. + :param bytes|unicode stdin: Text or bytes to pass to the command via stdin. :param NoneType|str encoding: The name of an encoding which stdout and - stderr will be configured to use. ``None`` means stdout and stderr - will accept bytes and unicode and use the default system encoding for - translating between them. + stderr will be configured to use. ``None`` means matching default + behavior for the given Python version. + + :param bool return_bytes: If False, stdout/stderr is native string, + matching native behavior. If True, stdout/stderr are returned as + bytes. 
""" nodeargs = kwargs.pop("nodeargs", []) - encoding = kwargs.pop("encoding", None) + encoding = kwargs.pop("encoding", None) or "utf-8" + return_bytes = kwargs.pop("return_bytes", False) verb = maybe_unicode_to_argv(verb) args = [maybe_unicode_to_argv(a) for a in args] nodeargs = [maybe_unicode_to_argv(a) for a in nodeargs] @@ -93,36 +99,42 @@ def run_cli_native(verb, *args, **kwargs): ) argv = nodeargs + [verb] + list(args) stdin = kwargs.get("stdin", "") - if encoding is None: - if PY2: - # The original behavior, the Python 2 behavior, is to accept either - # bytes or unicode and try to automatically encode or decode as - # necessary. This works okay for ASCII and if LANG is set - # appropriately. These aren't great constraints so we should move - # away from this behavior. - stdout = StringIO() - stderr = StringIO() - else: - # Default on Python 3 is accepting text. - stdout = TextIOWrapper(BytesIO(), "utf-8") - stderr = TextIOWrapper(BytesIO(), "utf-8") + if PY2: + # The original behavior, the Python 2 behavior, is to accept either + # bytes or unicode and try to automatically encode or decode as + # necessary. This works okay for ASCII and if LANG is set + # appropriately. These aren't great constraints so we should move + # away from this behavior. + stdin = StringIO(stdin) + stdout = StringIO() + stderr = StringIO() else: # The new behavior, the Python 3 behavior, is to accept unicode and - # encode it using a specific encoding. For older versions of Python - # 3, the encoding is determined from LANG (bad) but for newer Python - # 3, the encoding is always utf-8 (good). Tests can pass in different - # encodings to exercise different behaviors. + # encode it using a specific encoding. For older versions of Python 3, + # the encoding is determined from LANG (bad) but for newer Python 3, + # the encoding is either LANG if it supports full Unicode, otherwise + # utf-8 (good). Tests can pass in different encodings to exercise + # different behaviors. 
+ if isinstance(stdin, str): + stdin = stdin.encode(encoding) + stdin = TextIOWrapper(BytesIO(stdin), encoding) stdout = TextIOWrapper(BytesIO(), encoding) stderr = TextIOWrapper(BytesIO(), encoding) d = defer.succeed(argv) d.addCallback(runner.parse_or_exit_with_explanation, stdout=stdout) d.addCallback(runner.dispatch, - stdin=StringIO(stdin), + stdin=stdin, stdout=stdout, stderr=stderr) - def _done(rc): + def _done(rc, stdout=stdout, stderr=stderr): + if return_bytes and PY3: + stdout = stdout.buffer + stderr = stderr.buffer return 0, _getvalue(stdout), _getvalue(stderr) - def _err(f): + def _err(f, stdout=stdout, stderr=stderr): f.trap(SystemExit) + if return_bytes and PY3: + stdout = stdout.buffer + stderr = stderr.buffer return f.value.code, _getvalue(stdout), _getvalue(stderr) d.addCallbacks(_done, _err) return d diff --git a/src/allmydata/test/eliotutil.py b/src/allmydata/test/eliotutil.py index c2359f132..1685744fd 100644 --- a/src/allmydata/test/eliotutil.py +++ b/src/allmydata/test/eliotutil.py @@ -54,7 +54,7 @@ from twisted.python.monkey import ( MonkeyPatcher, ) -from ..util.jsonbytes import BytesJSONEncoder +from ..util.jsonbytes import AnyBytesJSONEncoder _NAME = Field.for_types( @@ -76,7 +76,7 @@ RUN_TEST = ActionType( if PY2: _memory_logger = MemoryLogger else: - _memory_logger = lambda: MemoryLogger(encoder=BytesJSONEncoder) + _memory_logger = lambda: MemoryLogger(encoder=AnyBytesJSONEncoder) @attr.s diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py index baee1acbe..652e51ea5 100644 --- a/src/allmydata/test/test_deepcheck.py +++ b/src/allmydata/test/test_deepcheck.py @@ -17,10 +17,10 @@ from __future__ import unicode_literals # (Pdb) pp data # '334:12:b\'mutable-good\',90:URI:SSK-RO:... from past.builtins import unicode as str -from future.utils import PY3, PY2 +from future.utils import PY2 if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - +from six import ensure_text import os, json from urllib.parse import quote as url_quote @@ -170,7 +170,8 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin, return data def parse_streamed_json(self, s): - for unit in s.split(b"\n"): + s = ensure_text(s) + for unit in s.split("\n"): if not unit: # stream should end with a newline, so split returns "" continue @@ -746,8 +747,6 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): def do_test_cli_good(self, ignored): d = defer.succeed(None) - if PY3: # TODO fixme once Python 3 CLI porting is done - return d d.addCallback(lambda ign: self.do_cli_manifest_stream1()) d.addCallback(lambda ign: self.do_cli_manifest_stream2()) d.addCallback(lambda ign: self.do_cli_manifest_stream3()) @@ -758,7 +757,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): return d def _check_manifest_storage_index(self, out): - lines = [l for l in out.split(b"\n") if l] + lines = [l.encode("utf-8") for l in out.split("\n") if l] self.failUnlessEqual(len(lines), 3) self.failUnless(base32.b2a(self.root.get_storage_index()) in lines) self.failUnless(base32.b2a(self.mutable.get_storage_index()) in lines) @@ -769,7 +768,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): def _check(args): (rc, out, err) = args self.failUnlessEqual(err, "") - lines = [l for l in out.split(b"\n") if l] + lines = [l for l in out.split("\n") if l] self.failUnlessEqual(len(lines), 8) caps = {} for l in lines: @@ -778,7 +777,7 @@ 
class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): except ValueError: cap = l.strip() path = "" - caps[cap] = path + caps[cap.encode("ascii")] = path self.failUnless(self.root.get_uri() in caps) self.failUnlessEqual(caps[self.root.get_uri()], "") self.failUnlessEqual(caps[self.mutable.get_uri()], "mutable") @@ -814,7 +813,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): def _check(args): (rc, out, err) = args self.failUnlessEqual(err, "") - lines = [l for l in out.split(b"\n") if l] + lines = [l.encode("utf-8") for l in out.split("\n") if l] self.failUnlessEqual(len(lines), 3) self.failUnless(self.root.get_verify_cap().to_string() in lines) self.failUnless(self.mutable.get_verify_cap().to_string() in lines) @@ -827,7 +826,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): def _check(args): (rc, out, err) = args self.failUnlessEqual(err, "") - lines = [l for l in out.split(b"\n") if l] + lines = [l.encode("utf-8") for l in out.split("\n") if l] self.failUnlessEqual(len(lines), 3) self.failUnless(self.root.get_repair_cap().to_string() in lines) self.failUnless(self.mutable.get_repair_cap().to_string() in lines) @@ -839,7 +838,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): d = self.do_cli("stats", self.root_uri) def _check3(args): (rc, out, err) = args - lines = [l.strip() for l in out.split(b"\n") if l] + lines = [l.strip() for l in out.split("\n") if l] self.failUnless("count-immutable-files: 1" in lines) self.failUnless("count-mutable-files: 1" in lines) self.failUnless("count-literal-files: 3" in lines) diff --git a/src/allmydata/test/test_download.py b/src/allmydata/test/test_download.py index 3a42b0819..d61942839 100644 --- a/src/allmydata/test/test_download.py +++ b/src/allmydata/test/test_download.py @@ -1304,7 +1304,7 @@ class MyShare(object): self._dyhb_rtt = rtt def __repr__(self): - return "sh%d-on-%s" % (self._shnum, self._server.get_name()) + return "sh%d-on-%s" % (self._shnum, str(self._server.get_name(), "ascii")) class MySegmentFetcher(SegmentFetcher): def __init__(self, *args, **kwargs): @@ -1383,7 +1383,7 @@ class Selection(unittest.TestCase): self.failUnless(node.failed) self.failUnless(node.failed.check(NotEnoughSharesError)) sname = serverA.get_name() - self.failUnlessIn("complete= pending=sh0-on-%s overdue= unused=" % sname, + self.failUnlessIn("complete= pending=sh0-on-%s overdue= unused=" % str(sname, "ascii"), str(node.failed)) d.addCallback(_check2) return d @@ -1605,7 +1605,7 @@ class Selection(unittest.TestCase): self.failUnless(node.failed) self.failUnless(node.failed.check(NotEnoughSharesError)) sname = servers[b"peer-2"].get_name() - self.failUnlessIn("complete=sh0 pending= overdue=sh2-on-%s unused=" % sname, + self.failUnlessIn("complete=sh0 pending= overdue=sh2-on-%s unused=" % str(sname, "ascii"), str(node.failed)) d.addCallback(_check4) return d diff --git a/src/allmydata/test/test_eliotutil.py b/src/allmydata/test/test_eliotutil.py index aca677323..3f915ecd2 100644 --- a/src/allmydata/test/test_eliotutil.py +++ b/src/allmydata/test/test_eliotutil.py @@ -69,7 +69,7 @@ from ..util.eliotutil import ( _parse_destination_description, _EliotLogging, ) -from ..util.jsonbytes import BytesJSONEncoder +from ..util.jsonbytes import AnyBytesJSONEncoder from .common import ( SyncTestCase, @@ -109,7 +109,7 @@ class ParseDestinationDescriptionTests(SyncTestCase): reactor = object() self.assertThat( _parse_destination_description("file:-")(reactor), - Equals(FileDestination(stdout, encoder=BytesJSONEncoder)), + 
Equals(FileDestination(stdout, encoder=AnyBytesJSONEncoder)), ) diff --git a/src/allmydata/test/test_runner.py b/src/allmydata/test/test_runner.py index f6a7c2ee1..7cc89c287 100644 --- a/src/allmydata/test/test_runner.py +++ b/src/allmydata/test/test_runner.py @@ -88,7 +88,7 @@ def run_bintahoe(extra_argv, python_options=None): argv = [executable] if python_options is not None: argv.extend(python_options) - argv.extend([u"-m", u"allmydata.scripts.runner"]) + argv.extend([u"-b", u"-m", u"allmydata.scripts.runner"]) argv.extend(extra_argv) argv = list(unicode_to_argv(arg) for arg in argv) p = Popen(argv, stdout=PIPE, stderr=PIPE) @@ -515,7 +515,7 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin): 0, "Expected error message from '{}', got something else: {}".format( description, - p.get_buffered_output(), + str(p.get_buffered_output(), "utf-8"), ), ) diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py index 040104b4c..12ae846eb 100644 --- a/src/allmydata/test/test_system.py +++ b/src/allmydata/test/test_system.py @@ -1,12 +1,12 @@ """ -Ported to Python 3, partially: test_filesystem* will be done in a future round. +Ported to Python 3. """ from __future__ import print_function from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals -from future.utils import PY2, PY3 +from future.utils import PY2 if PY2: # Don't import bytes since it causes issues on (so far unported) modules on Python 2. from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min, str # noqa: F401 @@ -16,7 +16,6 @@ from six import ensure_text, ensure_str import os, re, sys, time, json from functools import partial -from unittest import skipIf from bs4 import BeautifulSoup @@ -76,7 +75,7 @@ class RunBinTahoeMixin(object): # support env yet and is also synchronous. If we could get rid of # this in favor of that, though, it would probably be an improvement. command = sys.executable - argv = python_options + ["-m", "allmydata.scripts.runner"] + args + argv = python_options + ["-b", "-m", "allmydata.scripts.runner"] + args if env is None: env = os.environ @@ -1665,9 +1664,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): d.addCallback(self.log, "did _check_publish_private") d.addCallback(self._test_web) d.addCallback(self._test_control) - if PY2: - # TODO when CLI is ported to Python 3, reenable. 
- d.addCallback(self._test_cli) + d.addCallback(self._test_cli) # P now has four top-level children: # P/personal/sekrit data # P/s2-ro/ @@ -2298,7 +2295,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _check_aliases_1(out_and_err): (out, err) = out_and_err self.failUnlessEqual(err, "") - self.failUnlessEqual(out.strip(" \n"), "tahoe: %s" % private_uri) + self.failUnlessEqual(out.strip(" \n"), "tahoe: %s" % str(private_uri, "ascii")) d.addCallback(_check_aliases_1) # now that that's out of the way, remove root_dir.cap and work with @@ -2355,7 +2352,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): (out, err) = out_and_err self.failUnlessEqual(err, "") if filenum is not None: - self.failUnlessEqual(out, datas[filenum]) + self.failUnlessEqual(out, str(datas[filenum], "ascii")) if data is not None: self.failUnlessEqual(out, data) @@ -2369,7 +2366,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): uri0 = out.strip() return run(None, "get", uri0) d.addCallback(_put_out) - d.addCallback(lambda out_err: self.failUnlessEqual(out_err[0], datas[0])) + d.addCallback(lambda out_err: self.failUnlessEqual(out_err[0], str(datas[0], "ascii"))) d.addCallback(run, "put", files[1], "subdir/tahoe-file1") # tahoe put bar tahoe:FOO @@ -2411,14 +2408,14 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _check_outfile0(out_and_err): (out, err) = out_and_err data = open(outfile0,"rb").read() - self.failUnlessEqual(data, "data to be uploaded: file2\n") + self.failUnlessEqual(data, b"data to be uploaded: file2\n") d.addCallback(_check_outfile0) outfile1 = os.path.join(self.basedir, "outfile0") d.addCallback(run, "get", "tahoe:subdir/tahoe-file1", outfile1) def _check_outfile1(out_and_err): (out, err) = out_and_err data = open(outfile1,"rb").read() - self.failUnlessEqual(data, "data to be uploaded: file1\n") + self.failUnlessEqual(data, b"data to be uploaded: file1\n") d.addCallback(_check_outfile1) d.addCallback(run, "unlink", "tahoe-file0") @@ -2455,7 +2452,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): if "file3" in l: rw_uri = self._mutable_file3_uri u = uri.from_string_mutable_filenode(rw_uri) - ro_uri = u.get_readonly().to_string() + ro_uri = str(u.get_readonly().to_string(), "ascii") self.failUnless(ro_uri in l) d.addCallback(_check_ls_rouri) @@ -2528,17 +2525,17 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): dn = os.path.join(self.basedir, "dir1") os.makedirs(dn) with open(os.path.join(dn, "rfile1"), "wb") as f: - f.write("rfile1") + f.write(b"rfile1") with open(os.path.join(dn, "rfile2"), "wb") as f: - f.write("rfile2") + f.write(b"rfile2") with open(os.path.join(dn, "rfile3"), "wb") as f: - f.write("rfile3") + f.write(b"rfile3") sdn2 = os.path.join(dn, "subdir2") os.makedirs(sdn2) with open(os.path.join(sdn2, "rfile4"), "wb") as f: - f.write("rfile4") + f.write(b"rfile4") with open(os.path.join(sdn2, "rfile5"), "wb") as f: - f.write("rfile5") + f.write(b"rfile5") # from disk into tahoe d.addCallback(run, "cp", "-r", dn, "tahoe:") @@ -2582,7 +2579,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): (out, err) = out_and_err x = open(os.path.join(dn_copy2, "dir1", "subdir2", "rfile4")).read() y = uri.from_string_filenode(x) - self.failUnlessEqual(y.data, "rfile4") + self.failUnlessEqual(y.data, b"rfile4") d.addCallback(_check_capsonly) # and tahoe-to-tahoe @@ -2615,7 +2612,6 @@ class SystemTest(SystemTestMixin, 
RunBinTahoeMixin, unittest.TestCase): return d - @skipIf(PY3, "Python 3 CLI support hasn't happened yet.") def test_filesystem_with_cli_in_subprocess(self): # We do this in a separate test so that test_filesystem doesn't skip if we can't run bin/tahoe. @@ -2659,9 +2655,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _check_ls(res): out, err, rc_or_sig = res self.failUnlessEqual(rc_or_sig, 0, str(res)) - self.failUnlessEqual(err, "", str(res)) - self.failUnlessIn("tahoe-moved", out) - self.failIfIn("tahoe-file", out) + self.failUnlessEqual(err, b"", str(res)) + self.failUnlessIn(b"tahoe-moved", out) + self.failIfIn(b"tahoe-file", out) d.addCallback(_check_ls) return d diff --git a/src/allmydata/test/test_util.py b/src/allmydata/test/test_util.py index 9887897cf..4c2e98683 100644 --- a/src/allmydata/test/test_util.py +++ b/src/allmydata/test/test_util.py @@ -495,10 +495,10 @@ class YAML(unittest.TestCase): class JSONBytes(unittest.TestCase): - """Tests for BytesJSONEncoder.""" + """Tests for jsonbytes module.""" def test_encode_bytes(self): - """BytesJSONEncoder can encode bytes. + """jsonbytes.dumps() encodes bytes. Bytes are presumed to be UTF-8 encoded. """ @@ -515,7 +515,7 @@ class JSONBytes(unittest.TestCase): self.assertEqual(jsonbytes.loads(encoded), expected) def test_encode_unicode(self): - """BytesJSONEncoder encodes Unicode string as usual.""" + """jsonbytes.dumps() encodes Unicode string as usual.""" expected = { u"hello": [1, u"cd"], } @@ -529,6 +529,37 @@ class JSONBytes(unittest.TestCase): self.assertIsInstance(encoded, bytes) self.assertEqual(json.loads(encoded, encoding="utf-8"), x) + def test_any_bytes_unsupported_by_default(self): + """By default non-UTF-8 bytes raise error.""" + bytestring = b"abc\xff\x00" + with self.assertRaises(UnicodeDecodeError): + jsonbytes.dumps(bytestring) + with self.assertRaises(UnicodeDecodeError): + jsonbytes.dumps_bytes(bytestring) + with self.assertRaises(UnicodeDecodeError): + json.dumps(bytestring, cls=jsonbytes.UTF8BytesJSONEncoder) + + def test_any_bytes(self): + """If any_bytes is True, non-UTF-8 bytes don't break encoding.""" + bytestring = b"abc\xff\xff123" + o = {bytestring: bytestring} + expected = {"abc\\xff\\xff123": "abc\\xff\\xff123"} + self.assertEqual( + json.loads(jsonbytes.dumps(o, any_bytes=True)), + expected, + ) + self.assertEqual( + json.loads(json.dumps( + o, cls=jsonbytes.AnyBytesJSONEncoder)), + expected, + ) + self.assertEqual( + json.loads(jsonbytes.dumps(o, any_bytes=True), + encoding="utf-8"), + expected, + ) + + class FakeGetVersion(object): """Emulate an object with a get_version.""" diff --git a/src/allmydata/test/web/test_logs.py b/src/allmydata/test/web/test_logs.py index 5d697f910..89ec7ba42 100644 --- a/src/allmydata/test/web/test_logs.py +++ b/src/allmydata/test/web/test_logs.py @@ -92,7 +92,7 @@ class TestStreamingLogs(unittest.TestCase): @inlineCallbacks def test_one_log(self): """ - write a single Eliot log and see it streamed via websocket + Write a single Eliot log action and see it streamed via websocket. 
""" proto = yield self.agent.open( @@ -106,14 +106,18 @@ class TestStreamingLogs(unittest.TestCase): proto.on("message", got_message) @log_call(action_type=u"test:cli:some-exciting-action") - def do_a_thing(): + def do_a_thing(arguments): pass - do_a_thing() + do_a_thing(arguments=[u"hello", b"good-\xff-day", 123, {"a": 35}, [None]]) proto.transport.loseConnection() yield proto.is_closed self.assertEqual(len(messages), 2) + self.assertEqual(messages[0]["action_type"], "test:cli:some-exciting-action") + self.assertEqual(messages[0]["arguments"], + ["hello", "good-\\xff-day", 123, {"a": 35}, [None]]) + self.assertEqual(messages[1]["action_type"], "test:cli:some-exciting-action") self.assertEqual("started", messages[0]["action_status"]) self.assertEqual("succeeded", messages[1]["action_status"]) diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index 3003d2909..5f78af626 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -98,6 +98,7 @@ PORTED_MODULES = [ "allmydata.storage.shares", "allmydata.test", "allmydata.test.cli", + "allmydata.test.cli.common", "allmydata.test.cli_node_api", "allmydata.test.common", "allmydata.test.common_util", @@ -176,8 +177,16 @@ PORTED_TEST_MODULES = [ "allmydata.test.cli.test_alias", "allmydata.test.cli.test_backup", "allmydata.test.cli.test_backupdb", + "allmydata.test.cli.test_check", + "allmydata.test.cli.test_cli", + "allmydata.test.cli.test_cp", "allmydata.test.cli.test_create", + "allmydata.test.cli.test_create_alias", "allmydata.test.cli.test_invite", + "allmydata.test.cli.test_list", + "allmydata.test.cli.test_mv", + "allmydata.test.cli.test_put", + "allmydata.test.cli.test_run", "allmydata.test.cli.test_status", "allmydata.test.mutable.test_checker", @@ -209,11 +218,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_consumer", "allmydata.test.test_crawler", "allmydata.test.test_crypto", - - # Only partially ported, CLI-using test code is disabled for now until CLI - # is ported. "allmydata.test.test_deepcheck", - "allmydata.test.test_deferredutil", "allmydata.test.test_dictutil", "allmydata.test.test_dirnode", @@ -251,12 +256,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_storage", "allmydata.test.test_storage_client", "allmydata.test.test_storage_web", - - # Only partially ported, test_filesystem_with_cli_in_subprocess isn't - # ported yet, nor is part of test_filesystem (the call to _test_cli). This - # should be done once CLI is ported. 
"allmydata.test.test_system", - "allmydata.test.test_testing", "allmydata.test.test_time_format", "allmydata.test.test_tor_provider", diff --git a/src/allmydata/util/eliotutil.py b/src/allmydata/util/eliotutil.py index 5d144eb1d..4e48fbb9f 100644 --- a/src/allmydata/util/eliotutil.py +++ b/src/allmydata/util/eliotutil.py @@ -87,7 +87,7 @@ from twisted.internet.defer import ( ) from twisted.application.service import Service -from .jsonbytes import BytesJSONEncoder +from .jsonbytes import AnyBytesJSONEncoder def validateInstanceOf(t): @@ -306,7 +306,7 @@ class _DestinationParser(object): rotateLength=rotate_length, maxRotatedFiles=max_rotated_files, ) - return lambda reactor: FileDestination(get_file(), BytesJSONEncoder) + return lambda reactor: FileDestination(get_file(), AnyBytesJSONEncoder) _parse_destination_description = _DestinationParser().parse @@ -333,4 +333,4 @@ def log_call_deferred(action_type): if PY2: capture_logging = eliot_capture_logging else: - capture_logging = partial(eliot_capture_logging, encoder_=BytesJSONEncoder) + capture_logging = partial(eliot_capture_logging, encoder_=AnyBytesJSONEncoder) diff --git a/src/allmydata/util/jsonbytes.py b/src/allmydata/util/jsonbytes.py index c46a932d0..f6143f4d1 100644 --- a/src/allmydata/util/jsonbytes.py +++ b/src/allmydata/util/jsonbytes.py @@ -14,45 +14,100 @@ if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import json +import codecs + +if PY2: + def backslashreplace_py2(ex): + """ + On Python 2 'backslashreplace' error handler doesn't work, so write our + own. + """ + return ''.join('\\x{:02x}'.format(ord(c)) + for c in ex.object[ex.start:ex.end]), ex.end + + codecs.register_error("backslashreplace_tahoe_py2", backslashreplace_py2) -def _bytes_to_unicode(obj): - """Convert any bytes objects to unicode, recursively.""" - if isinstance(obj, bytes): - return obj.decode("utf-8") - if isinstance(obj, dict): - new_obj = {} - for k, v in obj.items(): - if isinstance(k, bytes): - k = k.decode("utf-8") - v = _bytes_to_unicode(v) - new_obj[k] = v - return new_obj - if isinstance(obj, (list, set, tuple)): - return [_bytes_to_unicode(i) for i in obj] - return obj +def _bytes_to_unicode(any_bytes, obj): + """Create a function that recursively converts bytes to unicode. - -class BytesJSONEncoder(json.JSONEncoder): + :param any_bytes: If True, also support non-UTF-8-encoded bytes. + :param obj: Object to de-byte-ify. """ - A JSON encoder than can also encode bytes. + errors = "backslashreplace" if any_bytes else "strict" + if PY2 and errors == "backslashreplace": + errors = "backslashreplace_tahoe_py2" - The bytes are assumed to be UTF-8 encoded Unicode strings. + def doit(obj): + """Convert any bytes objects to unicode, recursively.""" + if isinstance(obj, bytes): + return obj.decode("utf-8", errors=errors) + if isinstance(obj, dict): + new_obj = {} + for k, v in obj.items(): + if isinstance(k, bytes): + k = k.decode("utf-8", errors=errors) + v = doit(v) + new_obj[k] = v + return new_obj + if isinstance(obj, (list, set, tuple)): + return [doit(i) for i in obj] + return obj + + return doit(obj) + + +class UTF8BytesJSONEncoder(json.JSONEncoder): """ + A JSON encoder than can also encode UTF-8 encoded strings. 
+ """ + def encode(self, o, **kwargs): + return json.JSONEncoder.encode( + self, _bytes_to_unicode(False, o), **kwargs) + def iterencode(self, o, **kwargs): - return json.JSONEncoder.iterencode(self, _bytes_to_unicode(o), **kwargs) + return json.JSONEncoder.iterencode( + self, _bytes_to_unicode(False, o), **kwargs) + + +class AnyBytesJSONEncoder(json.JSONEncoder): + """ + A JSON encoder than can also encode bytes of any sort. + + Bytes are decoded to strings using UTF-8, if that fails to decode then the + bytes are quoted. + """ + def encode(self, o, **kwargs): + return json.JSONEncoder.encode( + self, _bytes_to_unicode(True, o), **kwargs) + + def iterencode(self, o, **kwargs): + return json.JSONEncoder.iterencode( + self, _bytes_to_unicode(True, o), **kwargs) def dumps(obj, *args, **kwargs): """Encode to JSON, supporting bytes as keys or values. - The bytes are assumed to be UTF-8 encoded Unicode strings. + :param bool any_bytes: If False (the default) the bytes are assumed to be + UTF-8 encoded Unicode strings. If True, non-UTF-8 bytes are quoted for + human consumption. """ - return json.dumps(obj, cls=BytesJSONEncoder, *args, **kwargs) + any_bytes = kwargs.pop("any_bytes", False) + if any_bytes: + cls = AnyBytesJSONEncoder + else: + cls = UTF8BytesJSONEncoder + return json.dumps(obj, cls=cls, *args, **kwargs) def dumps_bytes(obj, *args, **kwargs): - """Encode to JSON, then encode as bytes.""" + """Encode to JSON, then encode as bytes. + + :param bool any_bytes: If False (the default) the bytes are assumed to be + UTF-8 encoded Unicode strings. If True, non-UTF-8 bytes are quoted for + human consumption. + """ result = dumps(obj, *args, **kwargs) if PY3: result = result.encode("utf-8") diff --git a/src/allmydata/web/logs.py b/src/allmydata/web/logs.py index a78e9cd12..a79440eb9 100644 --- a/src/allmydata/web/logs.py +++ b/src/allmydata/web/logs.py @@ -8,8 +8,6 @@ from __future__ import ( division, ) -import json - from autobahn.twisted.resource import WebSocketResource from autobahn.twisted.websocket import ( WebSocketServerFactory, @@ -21,6 +19,8 @@ from twisted.web.resource import ( Resource, ) +from allmydata.util import jsonbytes as json + class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol): """ @@ -47,10 +47,7 @@ class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol): """ # probably want a try/except around here? what do we do if # transmission fails or anything else bad happens? - encoded = json.dumps(message) - if isinstance(encoded, str): - # On Python 3 dumps() returns Unicode... - encoded = encoded.encode("utf-8") + encoded = json.dumps_bytes(message, any_bytes=True) self.sendMessage(encoded) def onOpen(self):