Merge pull request #1039 from tahoe-lafs/3674.cli-tests-python-3

Port a few more CLI tests to Python 3

Fixes ticket:3674
Itamar Turner-Trauring 2021-04-13 09:39:13 -04:00 committed by GitHub
commit 4ea1f153cc
11 changed files with 133 additions and 75 deletions

newsfragments/3674.minor Normal file
View File

View File

@ -351,7 +351,7 @@ class BackupOptions(FileStoreOptions):
line. The file is assumed to be in the argv encoding."""
abs_filepath = argv_to_abspath(filepath)
try:
exclude_file = file(abs_filepath)
exclude_file = open(abs_filepath)
except:
raise BackupConfigurationError('Error opening exclude file %s.' % quote_local_unicode_path(abs_filepath))
try:

View File

@ -1,7 +1,6 @@
# coding: utf-8
from __future__ import print_function
from six import ensure_str
import os, sys, textwrap
import codecs
@ -22,11 +21,13 @@ from yaml import (
from future.utils import PY2
if PY2:
from future.builtins import str # noqa: F401
else:
from typing import Union
from twisted.python import usage
from allmydata.util.assertutil import precondition
from allmydata.util.encodingutil import unicode_to_url, quote_output, \
from allmydata.util.encodingutil import quote_output, \
quote_local_unicode_path, argv_to_abspath
from allmydata.scripts.default_nodedir import _default_nodedir
@ -274,18 +275,27 @@ def get_alias(aliases, path_unicode, default):
return uri.from_string_dirnode(aliases[alias]).to_string(), path[colon+1:]
def escape_path(path):
# type: (str) -> str
# type: (Union[str,bytes]) -> str
u"""
Return path quoted to US-ASCII, valid URL characters.
>>> path = u'/føö/bar/☃'
>>> escaped = escape_path(path)
>>> str(escaped)
'/f%C3%B8%C3%B6/bar/%E2%98%83'
>>> escaped.encode('ascii').decode('ascii') == escaped
True
>>> escaped
u'/f%C3%B8%C3%B6/bar/%E2%98%83'
"""
segments = path.split("/")
result = "/".join([urllib.parse.quote(unicode_to_url(s)) for s in segments])
result = ensure_str(result, "ascii")
if isinstance(path, str):
path = path.encode("utf-8")
segments = path.split(b"/")
result = str(
b"/".join([
urllib.parse.quote(s).encode("ascii") for s in segments
]),
"ascii"
)
# Eventually (i.e. as part of Python 3 port) we want this to always return
# Unicode strings. However, to reduce diff sizes in the short term it'll
# return native string (i.e. bytes) on Python 2.
if PY2:
result = result.encode("ascii").__native__()
return result
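
A minimal standalone sketch of the new behaviour (the helper name escape_path_sketch is made up for illustration): the ported escape_path now accepts either text or bytes and always returns an ASCII-only native string on Python 3.

from urllib.parse import quote

def escape_path_sketch(path):
    # Restates the ported escape_path() above: normalize text to UTF-8
    # bytes, percent-quote each path segment, return a native ASCII str.
    if isinstance(path, str):
        path = path.encode("utf-8")
    segments = path.split(b"/")
    return str(b"/".join(quote(s).encode("ascii") for s in segments), "ascii")

assert escape_path_sketch(u"/føö/bar/☃") == "/f%C3%B8%C3%B6/bar/%E2%98%83"
assert escape_path_sketch(b"/f\xc3\xb8\xc3\xb6/bar") == "/f%C3%B8%C3%B6/bar"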

View File

@ -449,12 +449,13 @@ def create_node(config):
v = remote_config.get(k, None)
if v is not None:
# we're faking usually argv-supplied options :/
v_orig = v
if isinstance(v, str):
v = v.encode(get_io_encoding())
config[k] = v
if k not in sensitive_keys:
if k not in ['shares-happy', 'shares-total', 'shares-needed']:
print(" {}: {}".format(k, v), file=out)
print(" {}: {}".format(k, v_orig), file=out)
else:
print(" {}: [sensitive data; see tahoe.cfg]".format(k), file=out)

View File

@ -1,14 +1,16 @@
from __future__ import print_function
from past.builtins import unicode
import os.path
import time
import urllib
import json
from urllib.parse import quote as url_quote
import datetime
from allmydata.scripts.common import get_alias, escape_path, DEFAULT_ALIAS, \
UnknownAliasError
from allmydata.scripts.common_http import do_http, HTTPError, format_http_error
from allmydata.util import time_format
from allmydata.util import time_format, jsonbytes as json
from allmydata.scripts import backupdb
from allmydata.util.encodingutil import listdir_unicode, quote_output, \
quote_local_unicode_path, to_bytes, FilenameEncodingError, unicode_to_url
@ -52,7 +54,7 @@ def mkdir(contents, options):
def put_child(dirurl, childname, childcap):
assert dirurl[-1] != "/"
url = dirurl + "/" + urllib.quote(unicode_to_url(childname)) + "?t=uri"
url = dirurl + "/" + url_quote(unicode_to_url(childname)) + "?t=uri"
resp = do_http("PUT", url, childcap)
if resp.status not in (200, 201):
raise HTTPError("Error during put_child", resp)
@ -97,7 +99,7 @@ class BackerUpper(object):
except UnknownAliasError as e:
e.display(stderr)
return 1
to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
to_url = nodeurl + "uri/%s/" % url_quote(rootcap)
if path:
to_url += escape_path(path)
if not to_url.endswith("/"):
@ -165,7 +167,7 @@ class BackerUpper(object):
if must_create:
self.verboseprint(" creating directory for %s" % quote_local_unicode_path(path))
newdircap = mkdir(create_contents, self.options)
assert isinstance(newdircap, str)
assert isinstance(newdircap, bytes)
if r:
r.did_create(newdircap)
return True, newdircap
@ -192,7 +194,7 @@ class BackerUpper(object):
filecap = r.was_uploaded()
self.verboseprint("checking %s" % quote_output(filecap))
nodeurl = self.options['node-url']
checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(filecap)
checkurl = nodeurl + "uri/%s?t=check&output=JSON" % url_quote(filecap)
self._files_checked += 1
resp = do_http("POST", checkurl)
if resp.status != 200:
@ -225,7 +227,7 @@ class BackerUpper(object):
dircap = r.was_created()
self.verboseprint("checking %s" % quote_output(dircap))
nodeurl = self.options['node-url']
checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(dircap)
checkurl = nodeurl + "uri/%s?t=check&output=JSON" % url_quote(dircap)
self._directories_checked += 1
resp = do_http("POST", checkurl)
if resp.status != 200:
@ -345,7 +347,7 @@ class FileTarget(object):
target = PermissionDeniedTarget(self._path, isdir=False)
return target.backup(progress, upload_file, upload_directory)
else:
assert isinstance(childcap, str)
assert isinstance(childcap, bytes)
if created:
return progress.created_file(self._path, childcap, metadata)
return progress.reused_file(self._path, childcap, metadata)
@ -525,12 +527,12 @@ class BackupProgress(object):
return self, {
os.path.basename(create_path): create_value
for (create_path, create_value)
in self._create_contents.iteritems()
in self._create_contents.items()
if os.path.dirname(create_path) == dirpath
}, {
os.path.basename(compare_path): compare_value
for (compare_path, compare_value)
in self._compare_contents.iteritems()
in self._compare_contents.items()
if os.path.dirname(compare_path) == dirpath
}
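
For reference, the urllib.quote call sites in this file were all migrated to the same urllib.parse.quote pattern; the nodeurl and rootcap values below are made-up illustrations, not real caps.

from urllib.parse import quote as url_quote

nodeurl = "http://127.0.0.1:3456/"            # hypothetical node URL
rootcap = "URI:DIR2:abc123:def456"            # hypothetical root cap
to_url = nodeurl + "uri/%s/" % url_quote(rootcap)
assert to_url == "http://127.0.0.1:3456/uri/URI%3ADIR2%3Aabc123%3Adef456/"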

View File

@ -1,6 +1,6 @@
from __future__ import print_function
import urllib
from urllib.parse import quote as url_quote
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
UnknownAliasError
from allmydata.scripts.common_http import do_http, format_http_error
@ -20,7 +20,7 @@ def get(options):
except UnknownAliasError as e:
e.display(stderr)
return 1
url = nodeurl + "uri/%s" % urllib.quote(rootcap)
url = nodeurl + "uri/%s" % url_quote(rootcap)
if path:
url += "/" + escape_path(path)
@ -30,6 +30,10 @@ def get(options):
outf = open(to_file, "wb")
else:
outf = stdout
# Make sure we can write bytes; on Python 3 stdout is Unicode by
# default.
if getattr(outf, "encoding", None) is not None:
outf = outf.buffer
while True:
data = resp.read(4096)
if not data:
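
The change above makes sure the downloaded bytes can be written even when stdout is a Python 3 text stream. A standalone sketch of the same check (write_bytes is a hypothetical helper, not part of the Tahoe code):

import io, sys

def write_bytes(stream, data):
    # A text stream (it has an .encoding) exposes its binary layer as
    # .buffer; an already-binary stream is used as-is.
    if getattr(stream, "encoding", None) is not None:
        stream = stream.buffer
    stream.write(data)

write_bytes(sys.stdout, b"raw bytes\n")    # text stdout: goes via .buffer
write_bytes(io.BytesIO(), b"raw bytes\n")  # binary stream: written directly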

View File

@ -1,6 +1,10 @@
from __future__ import print_function
import urllib, time
from past.builtins import unicode
from six import ensure_text, ensure_str
import time
from urllib.parse import quote as url_quote
import json
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
UnknownAliasError
@ -23,7 +27,7 @@ def list(options):
except UnknownAliasError as e:
e.display(stderr)
return 1
url = nodeurl + "uri/%s" % urllib.quote(rootcap)
url = nodeurl + "uri/%s" % url_quote(rootcap)
if path:
# move where.endswith check here?
url += "/" + escape_path(path)
@ -149,9 +153,9 @@ def list(options):
line.append(quote_output(name) + classify)
if options["uri"]:
line.append(uri)
line.append(ensure_str(uri))
if options["readonly-uri"]:
line.append(quote_output(ro_uri or "-", quotemarks=False))
line.append(quote_output(ensure_str(ro_uri) or "-", quotemarks=False))
rows.append((encoding_error, line))
@ -164,7 +168,7 @@ def list(options):
while len(left_justifys) <= i:
left_justifys.append(False)
max_widths[i] = max(max_widths[i], len(cell))
if cell.startswith("URI"):
if ensure_text(cell).startswith("URI"):
left_justifys[i] = True
if len(left_justifys) == 1:
left_justifys[0] = True
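
A short sketch of why the listing code above wraps caps in ensure_str()/ensure_text(): caps may arrive as bytes, but they end up in text table cells and are compared against the text prefix "URI". The cap below is a made-up example value.

from six import ensure_str, ensure_text

ro_uri = b"URI:DIR2-RO:abc:def"            # example read-only cap as bytes
cell = ensure_str(ro_uri)                  # native str on Python 2 and 3
assert ensure_text(cell).startswith("URI")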

View File

@ -1,3 +1,18 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import __builtin__ as builtins
else:
import builtins
import os.path
from six.moves import cStringIO as StringIO
from datetime import timedelta
@ -6,7 +21,6 @@ import re
from twisted.trial import unittest
from twisted.python.monkey import MonkeyPatcher
import __builtin__
from allmydata.util import fileutil
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.encodingutil import get_io_encoding, unicode_to_argv
@ -86,7 +100,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
d.addCallback(lambda res: do_backup(True))
def _check0(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertEqual(len(err), 0, err)
self.failUnlessReallyEqual(rc, 0)
(
files_uploaded,
@ -143,40 +157,40 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
d.addCallback(lambda res: self.do_cli("ls", "--uri", "tahoe:backups"))
def _check1(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertEqual(len(err), 0, err)
self.failUnlessReallyEqual(rc, 0)
lines = out.split("\n")
children = dict([line.split() for line in lines if line])
latest_uri = children["Latest"]
self.failUnless(latest_uri.startswith("URI:DIR2-CHK:"), latest_uri)
childnames = children.keys()
childnames = list(children.keys())
self.failUnlessReallyEqual(sorted(childnames), ["Archives", "Latest"])
d.addCallback(_check1)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
def _check2(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertEqual(len(err), 0, err)
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(sorted(out.split()), ["empty", "parent"])
d.addCallback(_check2)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
def _check2a(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertEqual(len(err), 0, err)
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out.strip(), "")
self.assertFalse(out.strip())
d.addCallback(_check2a)
d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
def _check3(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out, "foo")
self.assertEqual(out, "foo")
d.addCallback(_check3)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
def _check4(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
self.old_archives = out.split()
self.failUnlessReallyEqual(len(self.old_archives), 1)
@ -189,7 +203,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
# second backup should reuse everything, if the backupdb is
# available
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
fu, fr, fs, dc, dr, ds = self.count_output(out)
# foo.txt, bar.txt, blah.txt
@ -221,7 +235,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
# the directories should have been changed, so we should
# re-use all of them too.
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
fu, fr, fs, dc, dr, ds = self.count_output(out)
fchecked, dchecked = self.count_output2(out)
@ -238,7 +252,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
def _check5(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
self.new_archives = out.split()
self.failUnlessReallyEqual(len(self.new_archives), 3, out)
@ -265,7 +279,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
# second backup should reuse bar.txt (if backupdb is available),
# and upload the rest. None of the directories can be reused.
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
fu, fr, fs, dc, dr, ds = self.count_output(out)
# new foo.txt, surprise file, subfile, empty
@ -281,7 +295,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
def _check6(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
self.new_archives = out.split()
self.failUnlessReallyEqual(len(self.new_archives), 4)
@ -291,17 +305,17 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
def _check7(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out, "FOOF!")
self.assertEqual(out, "FOOF!")
# the old snapshot should not be modified
return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
d.addCallback(_check7)
def _check8(args):
(rc, out, err) = args
self.failUnlessReallyEqual(err, "")
self.assertFalse(err)
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out, "foo")
self.assertEqual(out, "foo")
d.addCallback(_check8)
return d
@ -382,7 +396,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
(nice_doc, u'lib.a'))
# read exclude patterns from file
exclusion_string = doc_pattern_arg + "\nlib.?"
exclusion_string = doc_pattern_arg + b"\nlib.?"
excl_filepath = os.path.join(basedir, 'exclusion')
fileutil.write(excl_filepath, exclusion_string)
backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
@ -407,12 +421,16 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
ns = Namespace()
ns.called = False
original_open = open
def call_file(name, *args):
ns.called = True
self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
return StringIO()
if name.endswith("excludes.dummy"):
ns.called = True
self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
return StringIO()
else:
return original_open(name, *args)
patcher = MonkeyPatcher((__builtin__, 'file', call_file))
patcher = MonkeyPatcher((builtins, 'open', call_file))
patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from', unicode_to_argv(exclude_file), 'from', 'to'])
self.failUnless(ns.called)
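
The patching strategy above (patch builtins.open, intercept only the exclude file, and delegate everything else to the real open) can be shown standalone; the file name echoes the test above, and the file contents are example values.

import builtins
from io import StringIO
from twisted.python.monkey import MonkeyPatcher

real_open = open

def fake_open(name, *args, **kwargs):
    # Serve a fake exclude file; let every other open() through untouched.
    if str(name).endswith("excludes.dummy"):
        return StringIO("doc_pattern\nlib.?")
    return real_open(name, *args, **kwargs)

patcher = MonkeyPatcher((builtins, "open", fake_open))
content = patcher.runWithPatches(lambda: open("somewhere/excludes.dummy").read())
assert content == "doc_pattern\nlib.?"
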
@ -584,7 +602,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
(rc, out, err) = args
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessReallyEqual(out, "")
self.assertEqual(len(out), 0)
d.addCallback(_check)
return d
@ -600,6 +618,6 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessIn("nonexistent", err)
self.failUnlessReallyEqual(out, "")
self.assertEqual(len(out), 0)
d.addCallback(_check)
return d

View File

@ -94,13 +94,18 @@ def run_cli_native(verb, *args, **kwargs):
argv = nodeargs + [verb] + list(args)
stdin = kwargs.get("stdin", "")
if encoding is None:
# The original behavior, the Python 2 behavior, is to accept either
# bytes or unicode and try to automatically encode or decode as
# necessary. This works okay for ASCII and if LANG is set
# appropriately. These aren't great constraints so we should move
# away from this behavior.
stdout = StringIO()
stderr = StringIO()
if PY2:
# The original behavior, the Python 2 behavior, is to accept either
# bytes or unicode and try to automatically encode or decode as
# necessary. This works okay for ASCII and if LANG is set
# appropriately. These aren't great constraints so we should move
# away from this behavior.
stdout = StringIO()
stderr = StringIO()
else:
# Default on Python 3 is accepting text.
stdout = TextIOWrapper(BytesIO(), "utf-8")
stderr = TextIOWrapper(BytesIO(), "utf-8")
else:
# The new behavior, the Python 3 behavior, is to accept unicode and
# encode it using a specific encoding. For older versions of Python
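
A standalone illustration of the Python 3 branch above: wrapping a BytesIO in a TextIOWrapper gives the CLI a text-mode stdout while the test can still read back the exact UTF-8 bytes that were written (the cap-like string is just an example value).

from io import BytesIO, TextIOWrapper

stdout = TextIOWrapper(BytesIO(), "utf-8")
stdout.write(u"URI:DIR2-CHK:\u2603\n")
stdout.flush()                      # push buffered text through the encoder
raw = stdout.buffer.getvalue()      # the bytes the CLI actually emitted
assert raw == u"URI:DIR2-CHK:\u2603\n".encode("utf-8")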

View File

@ -1,4 +1,16 @@
#!/usr/bin/python
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_binary
import os
from twisted.python.filepath import FilePath
@ -43,7 +55,7 @@ class MultiIntroTests(unittest.TestCase):
u'intro2':{ 'furl': 'furl4' },
},
}
self.yaml_path.setContent(yamlutil.safe_dump(connections))
self.yaml_path.setContent(ensure_binary(yamlutil.safe_dump(connections)))
# get a client and count of introducer_clients
myclient = yield create_client(self.basedir)
ic_count = len(myclient.introducer_clients)
@ -73,7 +85,7 @@ class MultiIntroTests(unittest.TestCase):
tahoe_cfg_furl = myclient.introducer_clients[0].introducer_furl
# assertions
self.failUnlessEqual(fake_furl, tahoe_cfg_furl)
self.failUnlessEqual(fake_furl, str(tahoe_cfg_furl, "utf-8"))
self.assertEqual(
list(
warning["message"]
@ -97,10 +109,10 @@ class MultiIntroTests(unittest.TestCase):
u'default': { 'furl': 'furl1' },
},
}
self.yaml_path.setContent(yamlutil.safe_dump(connections))
self.yaml_path.setContent(ensure_binary(yamlutil.safe_dump(connections)))
FilePath(self.basedir).child("tahoe.cfg").setContent(
"[client]\n"
"introducer.furl = furl1\n"
b"[client]\n"
b"introducer.furl = furl1\n"
)
with self.assertRaises(ValueError) as ctx:
@ -112,7 +124,7 @@ class MultiIntroTests(unittest.TestCase):
"please fix impossible configuration.",
)
SIMPLE_YAML = """
SIMPLE_YAML = b"""
introducers:
one:
furl: furl1
@ -121,7 +133,7 @@ introducers:
# this format was recommended in docs/configuration.rst in 1.12.0, but it
# isn't correct (the "furl = furl1" line is recorded as the string value of
# the ["one"] key, instead of being parsed as a single-key dictionary).
EQUALS_YAML = """
EQUALS_YAML = b"""
introducers:
one: furl = furl1
"""
@ -147,17 +159,17 @@ class NoDefault(unittest.TestCase):
connections = {'introducers': {
u'one': { 'furl': 'furl1' },
}}
self.yaml_path.setContent(yamlutil.safe_dump(connections))
self.yaml_path.setContent(ensure_binary(yamlutil.safe_dump(connections)))
myclient = yield create_client(self.basedir)
tahoe_cfg_furl = myclient.introducer_clients[0].introducer_furl
self.assertEquals(tahoe_cfg_furl, 'furl1')
self.assertEquals(tahoe_cfg_furl, b'furl1')
@defer.inlineCallbacks
def test_real_yaml(self):
self.yaml_path.setContent(SIMPLE_YAML)
myclient = yield create_client(self.basedir)
tahoe_cfg_furl = myclient.introducer_clients[0].introducer_furl
self.assertEquals(tahoe_cfg_furl, 'furl1')
self.assertEquals(tahoe_cfg_furl, b'furl1')
@defer.inlineCallbacks
def test_invalid_equals_yaml(self):
@ -172,6 +184,6 @@ class NoDefault(unittest.TestCase):
@defer.inlineCallbacks
def test_introducerless(self):
connections = {'introducers': {} }
self.yaml_path.setContent(yamlutil.safe_dump(connections))
self.yaml_path.setContent(ensure_binary(yamlutil.safe_dump(connections)))
myclient = yield create_client(self.basedir)
self.assertEquals(len(myclient.introducer_clients), 0)
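
For context, a sketch of what the ensure_binary() calls in this file are for, using PyYAML directly rather than the project's yamlutil wrapper: on Python 3 yaml.safe_dump() returns text, while FilePath.setContent() accepts only bytes, so the dump has to be encoded first.

import yaml
from six import ensure_binary

doc = yaml.safe_dump({"introducers": {"one": {"furl": "furl1"}}})
assert isinstance(doc, str)
payload = ensure_binary(doc)   # UTF-8-encodes text; passes bytes through
assert isinstance(payload, bytes)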

View File

@ -174,6 +174,7 @@ PORTED_MODULES = [
PORTED_TEST_MODULES = [
"allmydata.test.cli.test_alias",
"allmydata.test.cli.test_backup",
"allmydata.test.cli.test_backupdb",
"allmydata.test.cli.test_create",
"allmydata.test.cli.test_invite",
@ -234,6 +235,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_json_metadata",
"allmydata.test.test_log",
"allmydata.test.test_monitor",
"allmydata.test.test_multi_introducers",
"allmydata.test.test_netstring",
"allmydata.test.test_no_network",
"allmydata.test.test_node",