Merge pull request #1064 from tahoe-lafs/3716.allmydata-scripts-python-3-part-1

Port allmydata.scripts to Python 3, part 1

Fixes ticket:3716
This commit is contained in:
Itamar Turner-Trauring 2021-06-02 09:53:32 -04:00 committed by GitHub
commit 59dfd2c062
16 changed files with 168 additions and 101 deletions

View File

@ -514,10 +514,10 @@ Command Examples
the pattern will be matched against any level of the directory tree;
it's still impossible to specify absolute path exclusions.
``tahoe backup --exclude-from=/path/to/filename ~ work:backups``
``tahoe backup --exclude-from-utf-8=/path/to/filename ~ work:backups``
``--exclude-from`` is similar to ``--exclude``, but reads exclusion
patterns from ``/path/to/filename``, one per line.
``--exclude-from-utf-8`` is similar to ``--exclude``, but reads exclusion
patterns from a UTF-8-encoded ``/path/to/filename``, one per line.
``tahoe backup --exclude-vcs ~ work:backups``

View File

@ -0,0 +1 @@
``tahoe backup``'s ``--exclude-from`` has been renamed to ``--exclude-from-utf-8``, and correspondingly requires the file to be UTF-8 encoded.
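
In practice the rename just shifts responsibility for the file's encoding onto whoever writes it. A minimal sketch (not part of this change; the filename and patterns are made up):

    import io

    # Write the exclusion patterns as UTF-8, one glob per line; the renamed
    # option now reads this file with encoding="utf-8" instead of guessing
    # from the argv/locale encoding.
    with io.open("excludes.txt", "w", encoding="utf-8") as f:
        f.write(u"*.pyc\n_darcs\n.svn\n")

    # Then: tahoe backup --exclude-from-utf-8=excludes.txt ~ work:backups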

View File

@ -1,6 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from past.builtins import unicode
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_binary
try:
@ -25,9 +34,9 @@ def print_keypair(options):
from allmydata.crypto import ed25519
out = options.stdout
private_key, public_key = ed25519.create_signing_keypair()
print("private:", unicode(ed25519.string_from_signing_key(private_key), "ascii"),
print("private:", str(ed25519.string_from_signing_key(private_key), "ascii"),
file=out)
print("public:", unicode(ed25519.string_from_verifying_key(public_key), "ascii"),
print("public:", str(ed25519.string_from_verifying_key(public_key), "ascii"),
file=out)
class DerivePubkeyOptions(BaseOptions):
@ -52,8 +61,8 @@ def derive_pubkey(options):
privkey_vs = options.privkey
privkey_vs = ensure_binary(privkey_vs)
private_key, public_key = ed25519.signing_keypair_from_string(privkey_vs)
print("private:", unicode(ed25519.string_from_signing_key(private_key), "ascii"), file=out)
print("public:", unicode(ed25519.string_from_verifying_key(public_key), "ascii"), file=out)
print("private:", str(ed25519.string_from_signing_key(private_key), "ascii"), file=out)
print("public:", str(ed25519.string_from_verifying_key(public_key), "ascii"), file=out)
return 0
class AdminCommand(BaseOptions):
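
The unicode -> str substitutions above (and throughout this commit) rely on str(bytes, encoding) decoding on Python 3 exactly as unicode(bytes, encoding) did on Python 2. An illustrative sketch with made-up key bytes:

    key_bytes = b"fakekeymaterial"        # hypothetical base32-encoded key
    # Python 3's str(bytes, encoding) decodes bytes to text, just like
    # Python 2's unicode(bytes, encoding); with "from future.builtins
    # import str" in scope, the same spelling works on Python 2 as well.
    print("private:", str(key_bytes, "ascii"))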

View File

@ -1,5 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os.path, sys, time, random, stat
from allmydata.util.netstring import netstring

View File

@ -1,6 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from past.builtins import unicode
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os.path, re, fnmatch
@ -38,7 +47,7 @@ class FileStoreOptions(BaseOptions):
# compute a node-url from the existing options, put in self['node-url']
if self['node-url']:
if (not isinstance(self['node-url'], (bytes, unicode))
if (not isinstance(self['node-url'], (bytes, str))
or not NODEURL_RE.match(self['node-url'])):
msg = ("--node-url is required to be a string and look like "
"\"http://HOSTNAMEORADDR:PORT\", not: %r" %
@ -348,14 +357,15 @@ class BackupOptions(FileStoreOptions):
exclude = self['exclude']
exclude.add(g)
def opt_exclude_from(self, filepath):
def opt_exclude_from_utf_8(self, filepath):
"""Ignore file matching glob patterns listed in file, one per
line. The file is assumed to be in the argv encoding."""
abs_filepath = argv_to_abspath(filepath)
try:
exclude_file = open(abs_filepath)
except:
raise BackupConfigurationError('Error opening exclude file %s.' % quote_local_unicode_path(abs_filepath))
exclude_file = open(abs_filepath, "r", encoding="utf-8")
except Exception as e:
raise BackupConfigurationError('Error opening exclude file %s. (Error: %s)' % (
quote_local_unicode_path(abs_filepath), e))
try:
for line in exclude_file:
self.opt_exclude(line)
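
Two distinct fixes land in that hunk: the file is opened with an explicit encoding, and the bare except (which would also have swallowed KeyboardInterrupt and SystemExit) is narrowed to Exception so the underlying error reaches the message. A self-contained sketch, with a hypothetical ConfigurationError standing in for BackupConfigurationError:

    class ConfigurationError(Exception):
        """Hypothetical stand-in for BackupConfigurationError."""

    try:
        # Explicit encoding: the file's interpretation no longer depends
        # on the locale.
        exclude_file = open("/path/to/excludes", "r", encoding="utf-8")
    except Exception as e:
        # Narrowed from a bare "except:", and the cause is now reported.
        raise ConfigurationError("Error opening exclude file: %s" % (e,))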

View File

@ -1,7 +1,21 @@
# coding: utf-8
"""
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
else:
from typing import Union
import os, sys, textwrap
import codecs
from os.path import join
@ -17,13 +31,6 @@ from yaml import (
safe_dump,
)
# Python 2 compatibility
from future.utils import PY2
if PY2:
from future.builtins import str # noqa: F401
else:
from typing import Union
from twisted.python import usage
from allmydata.util.assertutil import precondition

View File

@ -1,5 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os
from io import BytesIO
from six.moves import urllib, http_client

View File

@ -1,12 +1,20 @@
"""
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2, bchr
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
try:
from allmydata.scripts.types_ import SubCommands
except ImportError:
pass
from future.utils import bchr
from past.builtins import unicode
# do not import any allmydata modules at this level. Do that from inside
# individual functions instead.
@ -94,7 +102,7 @@ def dump_immutable_chk_share(f, out, options):
def to_string(v):
if isinstance(v, bytes):
return unicode(v, "utf-8")
return str(v, "utf-8")
else:
return str(v)
@ -173,9 +181,9 @@ def format_expiration_time(expiration_time):
remains = expiration_time - now
when = "%ds" % remains
if remains > 24*3600:
when += " (%d days)" % (remains / (24*3600))
when += " (%d days)" % (remains // (24*3600))
elif remains > 3600:
when += " (%d hours)" % (remains / 3600)
when += " (%d hours)" % (remains // 3600)
return when
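
The / to // switches above are needed because this module now enables "from __future__ import division", so / is true division and returns a float on both Python 2 and 3. A quick sketch with a made-up duration:

    remains = 90000                # hypothetical: 25 hours, in seconds
    remains / (24 * 3600)          # 1.0416..., a float under true division
    remains // (24 * 3600)         # 1, the integer the old Python 2 "/" gave
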
@ -205,7 +213,7 @@ def dump_mutable_share(options):
print(file=out)
print("Mutable slot found:", file=out)
print(" share_type: %s" % share_type, file=out)
print(" write_enabler: %s" % unicode(base32.b2a(WE), "utf-8"), file=out)
print(" write_enabler: %s" % str(base32.b2a(WE), "utf-8"), file=out)
print(" WE for nodeid: %s" % idlib.nodeid_b2a(nodeid), file=out)
print(" num_extra_leases: %d" % num_extra_leases, file=out)
print(" container_size: %d" % container_size, file=out)
@ -217,8 +225,8 @@ def dump_mutable_share(options):
print(" ownerid: %d" % lease.owner_num, file=out)
when = format_expiration_time(lease.expiration_time)
print(" expires in %s" % when, file=out)
print(" renew_secret: %s" % unicode(base32.b2a(lease.renew_secret), "utf-8"), file=out)
print(" cancel_secret: %s" % unicode(base32.b2a(lease.cancel_secret), "utf-8"), file=out)
print(" renew_secret: %s" % str(base32.b2a(lease.renew_secret), "utf-8"), file=out)
print(" cancel_secret: %s" % str(base32.b2a(lease.cancel_secret), "utf-8"), file=out)
print(" secrets are for nodeid: %s" % idlib.nodeid_b2a(lease.nodeid), file=out)
else:
print("No leases.", file=out)
@ -266,8 +274,8 @@ def dump_SDMF_share(m, length, options):
print(" SDMF contents:", file=out)
print(" seqnum: %d" % seqnum, file=out)
print(" root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"), file=out)
print(" IV: %s" % unicode(base32.b2a(IV), "utf-8"), file=out)
print(" root_hash: %s" % str(base32.b2a(root_hash), "utf-8"), file=out)
print(" IV: %s" % str(base32.b2a(IV), "utf-8"), file=out)
print(" required_shares: %d" % k, file=out)
print(" total_shares: %d" % N, file=out)
print(" segsize: %d" % segsize, file=out)
@ -360,7 +368,7 @@ def dump_MDMF_share(m, length, options):
print(" MDMF contents:", file=out)
print(" seqnum: %d" % seqnum, file=out)
print(" root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"), file=out)
print(" root_hash: %s" % str(base32.b2a(root_hash), "utf-8"), file=out)
#print(" IV: %s" % base32.b2a(IV), file=out)
print(" required_shares: %d" % k, file=out)
print(" total_shares: %d" % N, file=out)
@ -485,19 +493,19 @@ def _dump_secrets(storage_index, secret, nodeid, out):
if secret:
crs = hashutil.my_renewal_secret_hash(secret)
print(" client renewal secret:", unicode(base32.b2a(crs), "ascii"), file=out)
print(" client renewal secret:", str(base32.b2a(crs), "ascii"), file=out)
frs = hashutil.file_renewal_secret_hash(crs, storage_index)
print(" file renewal secret:", unicode(base32.b2a(frs), "ascii"), file=out)
print(" file renewal secret:", str(base32.b2a(frs), "ascii"), file=out)
if nodeid:
renew = hashutil.bucket_renewal_secret_hash(frs, nodeid)
print(" lease renewal secret:", unicode(base32.b2a(renew), "ascii"), file=out)
print(" lease renewal secret:", str(base32.b2a(renew), "ascii"), file=out)
ccs = hashutil.my_cancel_secret_hash(secret)
print(" client cancel secret:", unicode(base32.b2a(ccs), "ascii"), file=out)
print(" client cancel secret:", str(base32.b2a(ccs), "ascii"), file=out)
fcs = hashutil.file_cancel_secret_hash(ccs, storage_index)
print(" file cancel secret:", unicode(base32.b2a(fcs), "ascii"), file=out)
print(" file cancel secret:", str(base32.b2a(fcs), "ascii"), file=out)
if nodeid:
cancel = hashutil.bucket_cancel_secret_hash(fcs, nodeid)
print(" lease cancel secret:", unicode(base32.b2a(cancel), "ascii"), file=out)
print(" lease cancel secret:", str(base32.b2a(cancel), "ascii"), file=out)
def dump_uri_instance(u, nodeid, secret, out, show_header=True):
from allmydata import uri
@ -508,19 +516,19 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
if isinstance(u, uri.CHKFileURI):
if show_header:
print("CHK File:", file=out)
print(" key:", unicode(base32.b2a(u.key), "ascii"), file=out)
print(" UEB hash:", unicode(base32.b2a(u.uri_extension_hash), "ascii"), file=out)
print(" key:", str(base32.b2a(u.key), "ascii"), file=out)
print(" UEB hash:", str(base32.b2a(u.uri_extension_hash), "ascii"), file=out)
print(" size:", u.size, file=out)
print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
_dump_secrets(u.get_storage_index(), secret, nodeid, out)
elif isinstance(u, uri.CHKFileVerifierURI):
if show_header:
print("CHK Verifier URI:", file=out)
print(" UEB hash:", unicode(base32.b2a(u.uri_extension_hash), "ascii"), file=out)
print(" UEB hash:", str(base32.b2a(u.uri_extension_hash), "ascii"), file=out)
print(" size:", u.size, file=out)
print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
elif isinstance(u, uri.LiteralFileURI):
if show_header:
@ -530,52 +538,52 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
elif isinstance(u, uri.WriteableSSKFileURI): # SDMF
if show_header:
print("SDMF Writeable URI:", file=out)
print(" writekey:", unicode(base32.b2a(u.writekey), "ascii"), file=out)
print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out)
print(" writekey:", str(base32.b2a(u.writekey), "ascii"), file=out)
print(" readkey:", str(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", str(base32.b2a(u.fingerprint), "ascii"), file=out)
print(file=out)
if nodeid:
we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
print(" write_enabler:", unicode(base32.b2a(we), "ascii"), file=out)
print(" write_enabler:", str(base32.b2a(we), "ascii"), file=out)
print(file=out)
_dump_secrets(u.get_storage_index(), secret, nodeid, out)
elif isinstance(u, uri.ReadonlySSKFileURI):
if show_header:
print("SDMF Read-only URI:", file=out)
print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out)
print(" readkey:", str(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", str(base32.b2a(u.fingerprint), "ascii"), file=out)
elif isinstance(u, uri.SSKVerifierURI):
if show_header:
print("SDMF Verifier URI:", file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", str(base32.b2a(u.fingerprint), "ascii"), file=out)
elif isinstance(u, uri.WriteableMDMFFileURI): # MDMF
if show_header:
print("MDMF Writeable URI:", file=out)
print(" writekey:", unicode(base32.b2a(u.writekey), "ascii"), file=out)
print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out)
print(" writekey:", str(base32.b2a(u.writekey), "ascii"), file=out)
print(" readkey:", str(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", str(base32.b2a(u.fingerprint), "ascii"), file=out)
print(file=out)
if nodeid:
we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
print(" write_enabler:", unicode(base32.b2a(we), "ascii"), file=out)
print(" write_enabler:", str(base32.b2a(we), "ascii"), file=out)
print(file=out)
_dump_secrets(u.get_storage_index(), secret, nodeid, out)
elif isinstance(u, uri.ReadonlyMDMFFileURI):
if show_header:
print("MDMF Read-only URI:", file=out)
print(" readkey:", unicode(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out)
print(" readkey:", str(base32.b2a(u.readkey), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", str(base32.b2a(u.fingerprint), "ascii"), file=out)
elif isinstance(u, uri.MDMFVerifierURI):
if show_header:
print("MDMF Verifier URI:", file=out)
print(" storage index:", unicode(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", unicode(base32.b2a(u.fingerprint), "ascii"), file=out)
print(" storage index:", str(si_b2a(u.get_storage_index()), "ascii"), file=out)
print(" fingerprint:", str(base32.b2a(u.fingerprint), "ascii"), file=out)
elif isinstance(u, uri.ImmutableDirectoryURI): # CHK-based directory
@ -623,7 +631,7 @@ class FindSharesOptions(BaseOptions):
def parseArgs(self, storage_index_s, *nodedirs):
from allmydata.util.encodingutil import argv_to_abspath
self.si_s = storage_index_s
self.nodedirs = map(argv_to_abspath, nodedirs)
self.nodedirs = list(map(argv_to_abspath, nodedirs))
description = """
Locate all shares for the given storage index. This command looks through one
@ -666,7 +674,7 @@ def find_shares(options):
class CatalogSharesOptions(BaseOptions):
def parseArgs(self, *nodedirs):
from allmydata.util.encodingutil import argv_to_abspath
self.nodedirs = map(argv_to_abspath, nodedirs)
self.nodedirs = list(map(argv_to_abspath, nodedirs))
if not nodedirs:
raise usage.UsageError("must specify at least one node directory")
@ -753,7 +761,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
print("SDMF %s %d/%d %d #%d:%s %d %s" % \
(si_s, k, N, datalen,
seqnum, unicode(base32.b2a(root_hash), "utf-8"),
seqnum, str(base32.b2a(root_hash), "utf-8"),
expiration, quote_output(abs_sharefile)), file=out)
elif share_type == "MDMF":
from allmydata.mutable.layout import MDMFSlotReadProxy
@ -782,7 +790,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
offsets) = verinfo
print("MDMF %s %d/%d %d #%d:%s %d %s" % \
(si_s, k, N, datalen,
seqnum, unicode(base32.b2a(root_hash), "utf-8"),
seqnum, str(base32.b2a(root_hash), "utf-8"),
expiration, quote_output(abs_sharefile)), file=out)
else:
print("UNKNOWN mutable %s" % quote_output(abs_sharefile), file=out)
@ -816,7 +824,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
ueb_hash = unpacked["UEB_hash"]
print("CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
unicode(ueb_hash, "utf-8"), expiration,
str(ueb_hash, "utf-8"), expiration,
quote_output(abs_sharefile)), file=out)
else:
@ -990,7 +998,7 @@ def fixOptionsClass(args):
class FlogtoolOptions(foolscap_cli.Options):
def __init__(self):
super(FlogtoolOptions, self).__init__()
self.subCommands = map(fixOptionsClass, self.subCommands)
self.subCommands = list(map(fixOptionsClass, self.subCommands))
def getSynopsis(self):
return "Usage: tahoe [global-options] debug flogtool COMMAND [flogtool-options]"

View File

@ -6,19 +6,14 @@ from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2, PY3
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import __builtin__ as builtins
else:
import builtins
from six import ensure_str
import os.path
from six.moves import cStringIO as StringIO
from datetime import timedelta
import re
import locale
from twisted.trial import unittest
from twisted.python.monkey import MonkeyPatcher
@ -358,14 +353,14 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
exclusion_string = "_darcs\n*py\n.svn"
excl_filepath = os.path.join(basedir, 'exclusion')
fileutil.write(excl_filepath, exclusion_string)
backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
backup_options = parse(['--exclude-from-utf-8', excl_filepath, 'from', 'to'])
filtered = list(backup_options.filter_listdir(subdir_listdir))
self._check_filtering(filtered, subdir_listdir, (u'another_doc.lyx', u'CVS'),
(u'.svn', u'_darcs', u'run_snake_run.py'))
# test BackupConfigurationError
self.failUnlessRaises(cli.BackupConfigurationError,
parse,
['--exclude-from', excl_filepath + '.no', 'from', 'to'])
['--exclude-from-utf-8', excl_filepath + '.no', 'from', 'to'])
# test that an iterator works too
backup_options = parse(['--exclude', '*lyx', 'from', 'to'])
@ -376,7 +371,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
def test_exclude_options_unicode(self):
nice_doc = u"nice_d\u00F8c.lyx"
try:
doc_pattern_arg = u"*d\u00F8c*"
doc_pattern_arg_unicode = doc_pattern_arg = u"*d\u00F8c*"
if PY2:
doc_pattern_arg = doc_pattern_arg.encode(get_io_encoding())
except UnicodeEncodeError:
@ -400,14 +395,10 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
(nice_doc, u'lib.a'))
# read exclude patterns from file
exclusion_string = doc_pattern_arg + ensure_str("\nlib.?")
if PY3:
# On Python 2 this gives some garbage encoding. Also on Python 2 we
# expect exclusion string to be bytes.
exclusion_string = exclusion_string.encode(locale.getpreferredencoding(False))
exclusion_string = (doc_pattern_arg_unicode + "\nlib.?").encode("utf-8")
excl_filepath = os.path.join(basedir, 'exclusion')
fileutil.write(excl_filepath, exclusion_string)
backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
backup_options = parse(['--exclude-from-utf-8', excl_filepath, 'from', 'to'])
filtered = list(backup_options.filter_listdir(root_listdir))
self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
(nice_doc, u'lib.a'))
@ -430,16 +421,20 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
ns = Namespace()
ns.called = False
original_open = open
def call_file(name, *args):
def call_file(name, *args, **kwargs):
if name.endswith("excludes.dummy"):
ns.called = True
self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
return StringIO()
else:
return original_open(name, *args)
return original_open(name, *args, **kwargs)
patcher = MonkeyPatcher((builtins, 'open', call_file))
patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from', unicode_to_argv(exclude_file), 'from', 'to'])
if PY2:
from allmydata.scripts import cli as module_to_patch
else:
import builtins as module_to_patch
patcher = MonkeyPatcher((module_to_patch, 'open', call_file))
patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to'])
self.failUnless(ns.called)
def test_ignore_symlinks(self):
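
Two details in that test are easy to miss: the fake open() must now accept **kwargs because the code under test passes encoding="utf-8", and the monkeypatch target depends on where the name is looked up; "from future.builtins import open" makes open a module attribute of allmydata.scripts.cli on Python 2, while Python 3 resolves it through builtins. A Python 3 sketch of the mechanism (names are illustrative):

    import io
    import builtins

    def fake_open(name, *args, **kwargs):
        # **kwargs is required: the patched code calls
        # open(path, "r", encoding="utf-8").
        return io.StringIO(u"")

    original = builtins.open
    builtins.open = fake_open    # Python 3: open() resolves through builtins
    try:
        f = open("excludes.dummy", "r", encoding="utf-8")  # hits fake_open
    finally:
        builtins.open = original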

View File

@ -15,7 +15,7 @@ from six.moves import cStringIO as StringIO
from allmydata import uri
from allmydata.util import base32
from allmydata.util.encodingutil import to_bytes
from allmydata.util.encodingutil import to_bytes, quote_output_u
from allmydata.mutable.publish import MutableData
from allmydata.immutable import upload
from allmydata.scripts import debug
@ -168,7 +168,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
self.uris = {}
self.fileurls = {}
DATA = b"data" * 100
quoted_good = u"'g\u00F6\u00F6d'"
quoted_good = quote_output_u("g\u00F6\u00F6d")
d = c0.create_dirnode()
def _stash_root_and_create_file(n):

View File

@ -10,7 +10,6 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_text
from six.moves import StringIO
import os.path
from twisted.trial import unittest
@ -20,7 +19,7 @@ from allmydata.util import fileutil
from allmydata.scripts.common import get_aliases
from allmydata.scripts import cli, runner
from ..no_network import GridTestMixin
from allmydata.util.encodingutil import quote_output
from allmydata.util.encodingutil import quote_output_u
from .common import CLITestMixin
class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
@ -182,7 +181,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
(rc, out, err) = args
self.failUnlessReallyEqual(rc, 0)
self.assertEqual(len(err), 0, err)
self.failUnlessIn(u"Alias %s created" % ensure_text(quote_output(etudes_arg)), out)
self.failUnlessIn(u"Alias %s created" % (quote_output_u(etudes_arg),), out)
aliases = get_aliases(self.get_clientdir())
self.failUnless(aliases[u"\u00E9tudes"].startswith(b"URI:DIR2:"))

View File

@ -130,9 +130,10 @@ class Integration(GridTestMixin, CLITestMixin, unittest.TestCase):
d.addCallback(_check)
return d
@mock.patch('sys.stdout')
def test_help(self, fake):
return self.do_cli('status', '--help')
@defer.inlineCallbacks
def test_help(self):
rc, _, _ = yield self.do_cli('status', '--help')
self.assertEqual(rc, 0)
class CommandStatus(unittest.TestCase):

View File

@ -12,6 +12,7 @@ if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401
import os
import sys
import time
import signal
from random import randrange
@ -85,7 +86,7 @@ def run_cli_native(verb, *args, **kwargs):
bytes.
"""
nodeargs = kwargs.pop("nodeargs", [])
encoding = kwargs.pop("encoding", None) or "utf-8"
encoding = kwargs.pop("encoding", None) or getattr(sys.stdout, "encoding") or "utf-8"
return_bytes = kwargs.pop("return_bytes", False)
verb = maybe_unicode_to_argv(verb)
args = [maybe_unicode_to_argv(a) for a in args]
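
The fallback chain introduced above (and mirrored in encodingutil further down) exists because sys.stdout.encoding is unreliable: it can be None when output is piped, and absent entirely when tests substitute a StringIO. A minimal sketch, spelled with a getattr default for safety:

    import sys

    # A terminal stdout reports a real encoding; a piped or replaced stdout
    # may report None or lack the attribute, so fall back to utf-8.
    encoding = getattr(sys.stdout, "encoding", None) or "utf-8"
    print(encoding)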

View File

@ -379,7 +379,10 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
check(u"\n", u"\"\\x0a\"", quote_newlines=True)
def test_quote_output_default(self):
self.test_quote_output_utf8(None)
"""Default is the encoding of sys.stdout if known, otherwise utf-8."""
encoding = getattr(sys.stdout, "encoding") or "utf-8"
self.assertEqual(quote_output(u"\u2621"),
quote_output(u"\u2621", encoding=encoding))
def win32_other(win32, other):

View File

@ -91,7 +91,13 @@ PORTED_MODULES = [
"allmydata.node",
"allmydata.nodemaker",
"allmydata.scripts",
"allmydata.scripts.admin",
"allmydata.scripts.backupdb",
"allmydata.scripts.cli",
"allmydata.scripts.common_http",
"allmydata.scripts.common",
"allmydata.scripts.create_node",
"allmydata.scripts.debug",
"allmydata.scripts.runner",
"allmydata.scripts.types_",
"allmydata.stats",

View File

@ -256,7 +256,11 @@ def quote_output_u(*args, **kwargs):
result = quote_output(*args, **kwargs)
if isinstance(result, unicode):
return result
return result.decode(kwargs.get("encoding", None) or io_encoding)
# Since we're quoting, the assumption is this will be read by a human, and
# therefore printed, so stdout's encoding is the plausible one. io_encoding
# is now always utf-8.
return result.decode(kwargs.get("encoding", None) or
getattr(sys.stdout, "encoding") or io_encoding)
def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
@ -276,7 +280,10 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
On Python 3, returns Unicode strings.
"""
precondition(isinstance(s, (bytes, unicode)), s)
encoding = encoding or io_encoding
# Since we're quoting, the assumption is this will be read by a human, and
# therefore printed, so stdout's encoding is the plausible one. io_encoding
# is now always utf-8.
encoding = encoding or getattr(sys.stdout, "encoding") or io_encoding
if quote_newlines is None:
quote_newlines = quotemarks
@ -284,7 +291,7 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
def _encode(s):
if isinstance(s, bytes):
try:
s = s.decode('utf-8')
s = s.decode("utf-8")
except UnicodeDecodeError:
return b'b"%s"' % (ESCAPABLE_8BIT.sub(lambda m: _bytes_escape(m, quote_newlines), s),)