commit e48d85dd8b in tahoe-lafs (mirror of https://github.com/tahoe-lafs/tahoe-lafs.git)
Merge branch '3679.more-cli-tests-python-3' into 3687.cli-tests-python-3
New files:
    newsfragments/3678.minor (new, empty)
    newsfragments/3679.minor (new, empty)
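Almost every hunk below applies one of two Python 3 porting patterns: urllib.quote is replaced by urllib.parse.quote (imported as url_quote), and the result of map() is wrapped in list() because map() returns a lazy iterator on Python 3. A minimal runnable sketch of the URL-escaping side, using an invented node URL and cap that are not taken from the patch:

# Illustrative sketch only; the node URL and cap below are invented.
from urllib.parse import quote as url_quote   # Python 3 home of Python 2's urllib.quote

def uri_url(nodeurl, cap, path=""):
    # Caps and paths can contain characters that are not URL-safe, so they are
    # percent-escaped before being placed in the request path.
    url = nodeurl + "uri/%s" % url_quote(cap)
    if path:
        url += "/" + url_quote(path)
    return url

if __name__ == "__main__":
    print(uri_url("http://127.0.0.1:3456/", "URI:DIR2:aaaa:bbbb", "sub dir/file.txt"))
    # -> http://127.0.0.1:3456/uri/URI%3ADIR2%3Aaaaa%3Abbbb/sub%20dir/file.txt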
@@ -224,7 +224,7 @@ class CpOptions(FileStoreOptions):
     def parseArgs(self, *args):
         if len(args) < 2:
             raise usage.UsageError("cp requires at least two arguments")
-        self.sources = map(argv_to_unicode, args[:-1])
+        self.sources = list(map(argv_to_unicode, args[:-1]))
         self.destination = argv_to_unicode(args[-1])

     synopsis = "[options] FROM.. TO"
@@ -435,7 +435,7 @@ class CheckOptions(FileStoreOptions):
         ("add-lease", None, "Add/renew lease on all shares."),
         ]
     def parseArgs(self, *locations):
-        self.locations = map(argv_to_unicode, locations)
+        self.locations = list(map(argv_to_unicode, locations))

     synopsis = "[options] [ALIAS:PATH]"
     description = """
@@ -452,7 +452,7 @@ class DeepCheckOptions(FileStoreOptions):
         ("verbose", "v", "Be noisy about what is happening."),
         ]
     def parseArgs(self, *locations):
-        self.locations = map(argv_to_unicode, locations)
+        self.locations = list(map(argv_to_unicode, locations))

     synopsis = "[options] [ALIAS:PATH]"
     description = """
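The list(map(...)) change above matters because map() is a one-shot iterator on Python 3: an options object that stores the result and later indexes or re-iterates it would silently see nothing the second time around. A standalone illustration (convert() stands in for argv_to_unicode and is not the real helper):

# Standalone illustration; "convert" stands in for argv_to_unicode.
def convert(arg):
    return arg.strip()

args = [" src1 ", " src2 ", " dest "]

lazy = map(convert, args[:-1])        # Python 3: a one-shot iterator
assert list(lazy) == ["src1", "src2"]
assert list(lazy) == []               # exhausted; a second pass sees nothing

materialized = list(map(convert, args[:-1]))
assert materialized[0] == "src1"      # indexing and re-iteration both work
assert len(materialized) == 2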
@@ -1,12 +1,15 @@
 from __future__ import print_function

+from past.builtins import unicode
+from six import ensure_str
+
 import os, time
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util import base32
 from allmydata.util.encodingutil import quote_output, is_printable_ascii
-import urllib
+from urllib.parse import quote as url_quote
 import json

 class SlowOperationRunner(object):
@@ -14,7 +17,7 @@ class SlowOperationRunner(object):
     def run(self, options):
         stderr = options.stderr
         self.options = options
-        self.ophandle = ophandle = base32.b2a(os.urandom(16))
+        self.ophandle = ophandle = ensure_str(base32.b2a(os.urandom(16)))
         nodeurl = options['node-url']
         if not nodeurl.endswith("/"):
             nodeurl += "/"
@@ -25,9 +28,10 @@ class SlowOperationRunner(object):
         except UnknownAliasError as e:
             e.display(stderr)
             return 1
+        path = unicode(path, "utf-8")
         if path == '/':
             path = ''
-        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+        url = nodeurl + "uri/%s" % url_quote(rootcap)
         if path:
             url += "/" + escape_path(path)
         # todo: should it end with a slash?
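The ensure_str() wrapper above is needed because, on Python 3, the project's base32 encoder returns bytes, while the operation handle is later interpolated into a text URL. A rough standalone sketch of the same idea using only the standard library; base64.b32encode is a stand-in for allmydata.util.base32.b2a (the real encoder uses a different alphabet), and the path shape is illustrative only:

# Standalone sketch; base64.b32encode stands in for allmydata.util.base32.b2a.
import base64
import os

raw = os.urandom(16)
encoded = base64.b32encode(raw)          # bytes on Python 3, e.g. b"GEZD..."
ophandle = encoded.decode("ascii")       # same effect as six.ensure_str() here

# Interpolating bytes into a text format string would yield "b'...'" garbage,
# so the handle must be text before it is used to build a URL.
status_path = "operations/%s?t=status" % ophandle   # path shape is invented
assert isinstance(ophandle, str)
print(status_path)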
@@ -1,6 +1,6 @@
 from __future__ import print_function

-import urllib
+from urllib.parse import quote as url_quote
 import json

 # Python 2 compatibility
@@ -34,9 +34,10 @@ def check_location(options, where):
     except UnknownAliasError as e:
         e.display(stderr)
         return 1
+    path = str(path, "utf-8")
     if path == '/':
         path = ''
-    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+    url = nodeurl + "uri/%s" % url_quote(rootcap)
     if path:
         url += "/" + escape_path(path)
     # todo: should it end with a slash?
@@ -52,7 +53,8 @@ def check_location(options, where):
     if resp.status != 200:
         print(format_http_error("ERROR", resp), file=stderr)
         return 1
-    jdata = resp.read()
+    jdata = resp.read().decode()
+
     if options.get("raw"):
         stdout.write(jdata)
         stdout.write("\n")
@@ -139,7 +141,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
         if self.in_error:
             print(quote_output(line, quotemarks=False), file=self.stderr)
             return
-        if line.startswith("ERROR:"):
+        if line.startswith(b"ERROR:"):
             self.in_error = True
             self.streamer.rc = 1
             print(quote_output(line, quotemarks=False), file=self.stderr)
@@ -202,7 +204,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
         if self.in_error:
             print(quote_output(line, quotemarks=False), file=self.stderr)
             return
-        if line.startswith("ERROR:"):
+        if line.startswith(b"ERROR:"):
             self.in_error = True
             self.streamer.rc = 1
             print(quote_output(line, quotemarks=False), file=self.stderr)
@@ -295,9 +297,10 @@ class DeepCheckStreamer(LineOnlyReceiver, object):
         except UnknownAliasError as e:
             e.display(stderr)
             return 1
+        path = str(path, "utf-8")
         if path == '/':
             path = ''
-        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+        url = nodeurl + "uri/%s" % url_quote(rootcap)
         if path:
             url += "/" + escape_path(path)
         # todo: should it end with a slash?
@@ -322,7 +325,7 @@ class DeepCheckStreamer(LineOnlyReceiver, object):
             if not chunk:
                 break
             if self.options["raw"]:
-                stdout.write(chunk)
+                stdout.write(chunk.decode())
             else:
                 output.dataReceived(chunk)
         if not self.options["raw"]:
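The switch to startswith(b"ERROR:") above reflects that Twisted's LineOnlyReceiver hands lineReceived() bytes on Python 3, so a text prefix would never match. A small self-contained check of that behaviour; the ErrorSpotter class is invented for illustration and is not part of the patch:

# Invented example class; only the bytes-vs-text point mirrors the patch.
from twisted.protocols.basic import LineOnlyReceiver
from twisted.test.proto_helpers import StringTransport

class ErrorSpotter(LineOnlyReceiver):
    delimiter = b"\n"

    def __init__(self):
        self.saw_error = False

    def lineReceived(self, line):
        # line is bytes on Python 3, so the prefix must be bytes too.
        if line.startswith(b"ERROR:"):
            self.saw_error = True

proto = ErrorSpotter()
proto.makeConnection(StringTransport())
proto.dataReceived(b"all good\nERROR: something broke\n")
assert proto.saw_error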
@@ -1,8 +1,9 @@
 from __future__ import print_function

+from past.builtins import unicode
+
 import os.path
-import urllib
-import json
+from urllib.parse import quote as url_quote
 from collections import defaultdict
 from six.moves import cStringIO as StringIO
 from twisted.python.failure import Failure
@@ -15,6 +16,7 @@ from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abs
 from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, \
     quote_local_unicode_path, to_bytes
 from allmydata.util.assertutil import precondition, _assert
+from allmydata.util import jsonbytes as json


 class MissingSourceError(TahoeError):
@@ -61,8 +63,8 @@ def mkdir(targeturl):

 def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
     url = nodeurl + "/".join(["uri",
-                              urllib.quote(parent_writecap),
-                              urllib.quote(unicode_to_url(name)),
+                              url_quote(parent_writecap),
+                              url_quote(unicode_to_url(name)),
                               ]) + "?t=mkdir"
     resp = do_http("POST", url)
     if resp.status in (200, 201):
@@ -199,7 +201,7 @@ class TahoeFileSource(object):
     def open(self, caps_only):
         if caps_only:
             return StringIO(self.readcap)
-        url = self.nodeurl + "uri/" + urllib.quote(self.readcap)
+        url = self.nodeurl + "uri/" + url_quote(self.readcap)
         return GET_to_file(url)

     def bestcap(self):
@@ -239,7 +241,7 @@ class TahoeDirectorySource(object):
         self.writecap = writecap
         self.readcap = readcap
         bestcap = writecap or readcap
-        url = self.nodeurl + "uri/%s" % urllib.quote(bestcap)
+        url = self.nodeurl + "uri/%s" % url_quote(bestcap)
         resp = do_http("GET", url + "?t=json")
         if resp.status != 200:
             raise HTTPError("Error examining source directory", resp)
@@ -249,7 +251,7 @@ class TahoeDirectorySource(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def init_from_parsed(self, parsed):
@@ -259,7 +261,7 @@ class TahoeDirectorySource(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def populate(self, recurse):
@@ -329,14 +331,14 @@ class TahoeDirectoryTarget(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def init_from_grid(self, writecap, readcap):
         self.writecap = writecap
         self.readcap = readcap
         bestcap = writecap or readcap
-        url = self.nodeurl + "uri/%s" % urllib.quote(bestcap)
+        url = self.nodeurl + "uri/%s" % url_quote(bestcap)
         resp = do_http("GET", url + "?t=json")
         if resp.status != 200:
             raise HTTPError("Error examining target directory", resp)
@@ -346,7 +348,7 @@ class TahoeDirectoryTarget(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def just_created(self, writecap):
@@ -370,8 +372,8 @@ class TahoeDirectoryTarget(object):
             url = None
             if self.writecap:
                 url = self.nodeurl + "/".join(["uri",
-                                               urllib.quote(self.writecap),
-                                               urllib.quote(unicode_to_url(name))])
+                                               url_quote(self.writecap),
+                                               url_quote(unicode_to_url(name))])
             self.children[name] = TahoeFileTarget(self.nodeurl, mutable,
                                                   writecap, readcap, url)
         elif data[0] == "dirnode":
@@ -439,7 +441,7 @@ class TahoeDirectoryTarget(object):
     def set_children(self):
         if not self.new_children:
             return
-        url = (self.nodeurl + "uri/" + urllib.quote(self.writecap)
+        url = (self.nodeurl + "uri/" + url_quote(self.writecap)
                + "?t=set_children")
         set_data = {}
         for (name, filecap) in self.new_children.items():
@@ -450,7 +452,7 @@ class TahoeDirectoryTarget(object):
             # TODO: think about how this affects forward-compatibility for
             # unknown caps
             set_data[name] = ["filenode", {"rw_uri": filecap}]
-        body = json.dumps(set_data)
+        body = json.dumps_bytes(set_data)
         POST(url, body)

 FileSources = (LocalFileSource, TahoeFileSource)
@@ -603,7 +605,7 @@ class Copier(object):
                 t = LocalFileTarget(pathname) # non-empty
         else:
             # this is a tahoe object
-            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
+            url = self.nodeurl + "uri/%s" % url_quote(rootcap)
             if path:
                 url += "/" + escape_path(path)

@@ -656,7 +658,7 @@ class Copier(object):
                 t = LocalFileSource(pathname, name) # non-empty
         else:
             # this is a tahoe object
-            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
+            url = self.nodeurl + "uri/%s" % url_quote(rootcap)
             name = None
             if path:
                 if path.endswith("/"):
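Two generic Python 3 points sit behind the hunks above, sketched here without the Tahoe helpers: dict.iteritems() no longer exists, and the .items() view works on both Python versions; and an HTTP POST body has to be bytes, which is why the patch switches to the project's jsonbytes wrapper (plain json.dumps(...).encode() is the stdlib equivalent shown below, with made-up cap strings):

# Plain-stdlib sketch; allmydata.util.jsonbytes is not used here.
import json

children = {u"file-one.txt": ("filenode", {"ro_uri": "URI:CHK:..."}),
            u"file-two.txt": ("filenode", {"ro_uri": "URI:CHK:..."})}

# Python 3: no dict.iteritems(); .items() returns an iterable view.
children_d = dict([(str(name), value) for (name, value) in children.items()])
assert sorted(children_d) == [u"file-one.txt", u"file-two.txt"]

# A POST body must be bytes; encode the JSON text explicitly.
set_data = {name: ["filenode", {"rw_uri": "URI:SSK:..."}] for name in children_d}
body = json.dumps(set_data).encode("utf-8")
assert isinstance(body, bytes)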
@@ -45,10 +45,10 @@ def list(options):
         return resp.status

    data = resp.read()

    if options['json']:
        # The webapi server should always output printable ASCII.
        if is_printable_ascii(data):
+            data = unicode(data, "ascii")
            print(data, file=stdout)
            return 0
        else:
|
||||
from __future__ import print_function
|
||||
|
||||
import urllib, json
|
||||
from past.builtins import unicode
|
||||
|
||||
from urllib.parse import quote as url_quote
|
||||
import json
|
||||
from twisted.protocols.basic import LineOnlyReceiver
|
||||
from allmydata.util.abbreviate import abbreviate_space_both
|
||||
from allmydata.scripts.slow_operation import SlowOperationRunner
|
||||
@ -33,9 +36,10 @@ class ManifestStreamer(LineOnlyReceiver, object):
|
||||
except UnknownAliasError as e:
|
||||
e.display(stderr)
|
||||
return 1
|
||||
path = unicode(path, "utf-8")
|
||||
if path == '/':
|
||||
path = ''
|
||||
url = nodeurl + "uri/%s" % urllib.quote(rootcap)
|
||||
url = nodeurl + "uri/%s" % url_quote(rootcap)
|
||||
if path:
|
||||
url += "/" + escape_path(path)
|
||||
# todo: should it end with a slash?
|
||||
@ -63,7 +67,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
|
||||
if self.in_error:
|
||||
print(quote_output(line, quotemarks=False), file=stderr)
|
||||
return
|
||||
if line.startswith("ERROR:"):
|
||||
if line.startswith(b"ERROR:"):
|
||||
self.in_error = True
|
||||
self.rc = 1
|
||||
print(quote_output(line, quotemarks=False), file=stderr)
|
||||
|
@@ -1,6 +1,8 @@
 from __future__ import print_function

-import urllib
+from past.builtins import unicode
+
+from urllib.parse import quote as url_quote
 from allmydata.scripts.common_http import do_http, check_http_error
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, UnknownAliasError
 from allmydata.util.encodingutil import quote_output
@@ -24,7 +26,7 @@ def mkdir(options):
         # create a new unlinked directory
         url = nodeurl + "uri?t=mkdir"
         if options["format"]:
-            url += "&format=%s" % urllib.quote(options['format'])
+            url += "&format=%s" % url_quote(options['format'])
         resp = do_http("POST", url)
         rc = check_http_error(resp, stderr)
         if rc:
@@ -35,13 +37,14 @@ def mkdir(options):
         return 0

     # create a new directory at the given location
+    path = unicode(path, "utf-8")
     if path.endswith("/"):
         path = path[:-1]
     # path must be "/".join([s.encode("utf-8") for s in segments])
-    url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap),
-                                           urllib.quote(path))
+    url = nodeurl + "uri/%s/%s?t=mkdir" % (url_quote(rootcap),
+                                           url_quote(path))
     if options['format']:
-        url += "&format=%s" % urllib.quote(options['format'])
+        url += "&format=%s" % url_quote(options['format'])

     resp = do_http("POST", url)
     check_http_error(resp, stderr)
@@ -1,7 +1,7 @@
 from __future__ import print_function

 import re
-import urllib
+from urllib.parse import quote as url_quote
 import json
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
@@ -25,7 +25,7 @@ def mv(options, mode="move"):
     except UnknownAliasError as e:
         e.display(stderr)
         return 1
-    from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+    from_url = nodeurl + "uri/%s" % url_quote(rootcap)
     if from_path:
         from_url += "/" + escape_path(from_path)
     # figure out the source cap
@@ -43,7 +43,7 @@ def mv(options, mode="move"):
     except UnknownAliasError as e:
         e.display(stderr)
         return 1
-    to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+    to_url = nodeurl + "uri/%s" % url_quote(rootcap)
     if path:
         to_url += "/" + escape_path(path)

@@ -1,7 +1,9 @@
 from __future__ import print_function

+from past.builtins import unicode
+
 from six.moves import cStringIO as StringIO
-import urllib
+from urllib.parse import quote as url_quote

 from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
@@ -46,19 +48,20 @@ def put(options):

     # FIXME: don't hardcode cap format.
     if to_file.startswith("URI:MDMF:") or to_file.startswith("URI:SSK:"):
-        url = nodeurl + "uri/%s" % urllib.quote(to_file)
+        url = nodeurl + "uri/%s" % url_quote(to_file)
     else:
         try:
             rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
         except UnknownAliasError as e:
             e.display(stderr)
             return 1
+        path = unicode(path, "utf-8")
         if path.startswith("/"):
             suggestion = to_file.replace(u"/", u"", 1)
             print("Error: The remote filename must not start with a slash", file=stderr)
             print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr)
             return 1
-        url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
+        url = nodeurl + "uri/%s/" % url_quote(rootcap)
         if path:
             url += escape_path(path)
     else:
@@ -1,3 +1,13 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from six import ensure_text
+
 import os.path
 import json
 from twisted.trial import unittest
@@ -5,20 +15,21 @@ from six.moves import cStringIO as StringIO

 from allmydata import uri
 from allmydata.util import base32
-from allmydata.util.encodingutil import quote_output, to_bytes
+from allmydata.util.encodingutil import to_bytes
 from allmydata.mutable.publish import MutableData
 from allmydata.immutable import upload
 from allmydata.scripts import debug
 from ..no_network import GridTestMixin
 from .common import CLITestMixin

+
 class Check(GridTestMixin, CLITestMixin, unittest.TestCase):

     def test_check(self):
         self.basedir = "cli/Check/check"
         self.set_up_grid()
         c0 = self.g.clients[0]
-        DATA = "data" * 100
+        DATA = b"data" * 100
         DATA_uploadable = MutableData(DATA)
         d = c0.create_mutable_file(DATA_uploadable)
         def _stash_uri(n):
@@ -28,7 +39,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", self.uri))
         def _check1(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: Healthy" in lines, out)
@@ -38,14 +49,14 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", "--raw", self.uri))
         def _check2(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             data = json.loads(out)
-            self.failUnlessReallyEqual(to_bytes(data["summary"]), "Healthy")
+            self.failUnlessReallyEqual(to_bytes(data["summary"]), b"Healthy")
             self.failUnlessReallyEqual(data["results"]["healthy"], True)
         d.addCallback(_check2)

-        d.addCallback(lambda ign: c0.upload(upload.Data("literal", convergence="")))
+        d.addCallback(lambda ign: c0.upload(upload.Data(b"literal", convergence=b"")))
         def _stash_lit_uri(n):
             self.lit_uri = n.get_uri()
         d.addCallback(_stash_lit_uri)
@@ -53,7 +64,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", self.lit_uri))
         def _check_lit(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: Healthy (LIT)" in lines, out)
@@ -62,13 +73,13 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", "--raw", self.lit_uri))
         def _check_lit_raw(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             data = json.loads(out)
             self.failUnlessReallyEqual(data["results"]["healthy"], True)
         d.addCallback(_check_lit_raw)

-        d.addCallback(lambda ign: c0.create_immutable_dirnode({}, convergence=""))
+        d.addCallback(lambda ign: c0.create_immutable_dirnode({}, convergence=b""))
         def _stash_lit_dir_uri(n):
             self.lit_dir_uri = n.get_uri()
         d.addCallback(_stash_lit_dir_uri)
@@ -89,16 +100,16 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             cso.parseOptions([shares[1][2]])
             storage_index = uri.from_string(self.uri).get_storage_index()
             self._corrupt_share_line = " server %s, SI %s, shnum %d" % \
-                                       (base32.b2a(shares[1][1]),
-                                        base32.b2a(storage_index),
-                                        shares[1][0])
+                                       (str(base32.b2a(shares[1][1]), "ascii"),
+                                        str(base32.b2a(storage_index), "ascii"),
+                                        shares[1][0])
             debug.corrupt_share(cso)
         d.addCallback(_clobber_shares)

         d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri))
         def _check3(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             summary = [l for l in lines if l.startswith("Summary")][0]
@@ -112,7 +123,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", "--verify", "--raw", self.uri))
         def _check3_raw(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             data = json.loads(out)
             self.failUnlessReallyEqual(data["results"]["healthy"], False)
@@ -126,7 +137,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli("check", "--verify", "--repair", self.uri))
         def _check4(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: not healthy" in lines, out)
@@ -140,7 +151,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli("check", "--verify", "--repair", self.uri))
         def _check5(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: healthy" in lines, out)
@@ -156,14 +167,14 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         c0 = self.g.clients[0]
         self.uris = {}
         self.fileurls = {}
-        DATA = "data" * 100
-        quoted_good = quote_output(u"g\u00F6\u00F6d")
+        DATA = b"data" * 100
+        quoted_good = u"'g\u00F6\u00F6d'"

         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n
             self.rooturi = n.get_uri()
-            return n.add_file(u"g\u00F6\u00F6d", upload.Data(DATA, convergence=""))
+            return n.add_file(u"g\u00F6\u00F6d", upload.Data(DATA, convergence=b""))
         d.addCallback(_stash_root_and_create_file)
         def _stash_uri(fn, which):
             self.uris[which] = fn.get_uri()
@@ -171,18 +182,18 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_uri, u"g\u00F6\u00F6d")
         d.addCallback(lambda ign:
                       self.rootnode.add_file(u"small",
-                                             upload.Data("literal",
-                                                         convergence="")))
+                                             upload.Data(b"literal",
+                                                         convergence=b"")))
         d.addCallback(_stash_uri, "small")
         d.addCallback(lambda ign:
-                      c0.create_mutable_file(MutableData(DATA+"1")))
+                      c0.create_mutable_file(MutableData(DATA+b"1")))
         d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
         d.addCallback(_stash_uri, "mutable")

         d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
         def _check1(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
@@ -198,8 +209,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                               self.rooturi))
         def _check2(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': Healthy" in lines, out)
             self.failUnless("'small': Healthy (LIT)" in lines, out)
@@ -212,7 +224,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("stats", self.rooturi))
         def _check_stats(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnlessIn(" count-immutable-files: 1", lines)
@@ -236,8 +248,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             cso.parseOptions([shares[1][2]])
             storage_index = uri.from_string(self.uris["mutable"]).get_storage_index()
             self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \
-                                       (base32.b2a(shares[1][1]),
-                                        base32.b2a(storage_index),
+                                       (str(base32.b2a(shares[1][1]), "ascii"),
+                                        str(base32.b2a(storage_index), "ascii"),
                                         shares[1][0])
             debug.corrupt_share(cso)
         d.addCallback(_clobber_shares)
@@ -251,8 +263,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli("deep-check", "--verbose", self.rooturi))
         def _check3(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': Healthy" in lines, out)
             self.failUnless("'small': Healthy (LIT)" in lines, out)
@@ -268,8 +281,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.rooturi))
         def _check4(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': Healthy" in lines, out)
             self.failUnless("'small': Healthy (LIT)" in lines, out)
@@ -287,7 +301,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.rooturi))
         def _check5(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             units = [json.loads(line) for line in lines]
@@ -301,8 +315,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.rooturi))
         def _check6(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': healthy" in lines, out)
             self.failUnless("'small': healthy" in lines, out)
@@ -322,10 +337,10 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"subdir"))
         d.addCallback(_stash_uri, "subdir")
         d.addCallback(lambda fn:
-                      fn.add_file(u"subfile", upload.Data(DATA+"2", "")))
+                      fn.add_file(u"subfile", upload.Data(DATA+b"2", b"")))
         d.addCallback(lambda ign:
                       self.delete_shares_numbered(self.uris["subdir"],
-                                                  range(10)))
+                                                  list(range(10))))

         # root
         # rootg\u00F6\u00F6d/
@@ -340,7 +355,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failIfEqual(rc, 0)
             self.failUnlessIn("ERROR: UnrecoverableFileError", err)
             # the fatal directory should still show up, as the last line
-            self.failUnlessIn(" subdir\n", out)
+            self.failUnlessIn(" subdir\n", ensure_text(out))
         d.addCallback(_manifest_failed)

         d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
@@ -379,7 +394,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
-            self.failUnlessReallyEqual(out, "")
+            self.assertEqual(len(out), 0, out)
         d.addCallback(_check)
         d.addCallback(lambda ign: self.do_cli("deep-check"))
         d.addCallback(_check)
@@ -396,7 +411,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
-            self.failUnlessReallyEqual(out, "")
+            self.assertEqual(len(out), 0, out)
         d.addCallback(_check)
         return d

@@ -416,10 +431,10 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         def _check(args):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 0)
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             #Ensure healthy appears for each uri
-            self.failUnlessIn("Healthy", out[:len(out)/2])
-            self.failUnlessIn("Healthy", out[len(out)/2:])
+            self.failUnlessIn("Healthy", out[:len(out)//2])
+            self.failUnlessIn("Healthy", out[len(out)//2:])
         d.addCallback(_check)

         d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], "nonexistent:"))
|
||||
"""
|
||||
Ported to Python 3.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from future.utils import PY2
|
||||
if PY2:
|
||||
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
|
||||
|
||||
import os.path, json
|
||||
from twisted.trial import unittest
|
||||
@ -24,12 +34,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
def test_unicode_filename(self):
|
||||
self.basedir = "cli/Cp/unicode_filename"
|
||||
|
||||
fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
|
||||
try:
|
||||
fn1_arg = fn1.encode(get_io_encoding())
|
||||
artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
|
||||
except UnicodeEncodeError:
|
||||
raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
|
||||
fn1 = os.path.join(self.basedir, u"\u00C4rtonwall")
|
||||
artonwall_arg = u"\u00C4rtonwall"
|
||||
|
||||
skip_if_cannot_represent_filename(fn1)
|
||||
|
||||
@ -44,15 +50,15 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
|
||||
d = self.do_cli("create-alias", "tahoe")
|
||||
|
||||
d.addCallback(lambda res: self.do_cli("cp", fn1_arg, "tahoe:"))
|
||||
d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
|
||||
|
||||
d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg))
|
||||
d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA1))
|
||||
d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA1))
|
||||
|
||||
d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
|
||||
|
||||
d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
|
||||
d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA2))
|
||||
d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA2))
|
||||
|
||||
d.addCallback(lambda res: self.do_cli("ls", "tahoe:"))
|
||||
def _check(args):
|
||||
@ -66,8 +72,10 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
self.failUnlessIn("files whose names could not be converted", err)
|
||||
else:
|
||||
self.failUnlessReallyEqual(rc, 0)
|
||||
self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"Metallica\n\u00C4rtonwall\n")
|
||||
self.failUnlessReallyEqual(err, "")
|
||||
if PY2:
|
||||
out = out.decode(get_io_encoding())
|
||||
self.failUnlessReallyEqual(out, u"Metallica\n\u00C4rtonwall\n")
|
||||
self.assertEqual(len(err), 0, err)
|
||||
d.addCallback(_check)
|
||||
|
||||
return d
|
||||
@ -98,7 +106,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
fn1 = os.path.join(self.basedir, "Metallica")
|
||||
fn2 = os.path.join(outdir, "Not Metallica")
|
||||
fn3 = os.path.join(outdir, "test2")
|
||||
DATA1 = "puppies" * 10000
|
||||
DATA1 = b"puppies" * 10000
|
||||
fileutil.write(fn1, DATA1)
|
||||
|
||||
d = self.do_cli("create-alias", "tahoe")
|
||||
@ -128,7 +136,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
self.failUnlessReallyEqual(rc, 1)
|
||||
self.failUnlessIn("when copying into a directory, all source files must have names, but",
|
||||
err)
|
||||
self.failUnlessReallyEqual(out, "")
|
||||
self.assertEqual(len(out), 0, out)
|
||||
d.addCallback(_resp)
|
||||
|
||||
# Create a directory, linked at tahoe:test .
|
||||
@ -200,13 +208,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
def test_unicode_dirnames(self):
|
||||
self.basedir = "cli/Cp/unicode_dirnames"
|
||||
|
||||
fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
|
||||
try:
|
||||
fn1_arg = fn1.encode(get_io_encoding())
|
||||
del fn1_arg # hush pyflakes
|
||||
artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
|
||||
except UnicodeEncodeError:
|
||||
raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
|
||||
fn1 = os.path.join(self.basedir, u"\u00C4rtonwall")
|
||||
artonwall_arg = u"\u00C4rtonwall"
|
||||
|
||||
skip_if_cannot_represent_filename(fn1)
|
||||
|
||||
@ -222,13 +225,15 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
unicode_to_output(u"\u00C4rtonwall")
|
||||
except UnicodeEncodeError:
|
||||
self.failUnlessReallyEqual(rc, 1)
|
||||
self.failUnlessReallyEqual(out, "")
|
||||
self.assertEqual(len(out), 0, out)
|
||||
self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
|
||||
self.failUnlessIn("files whose names could not be converted", err)
|
||||
else:
|
||||
self.failUnlessReallyEqual(rc, 0)
|
||||
self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"\u00C4rtonwall\n")
|
||||
self.failUnlessReallyEqual(err, "")
|
||||
if PY2:
|
||||
out = out.decode(get_io_encoding())
|
||||
self.failUnlessReallyEqual(out, u"\u00C4rtonwall\n")
|
||||
self.assertEqual(len(err), 0, err)
|
||||
d.addCallback(_check)
|
||||
|
||||
return d
|
||||
@ -818,9 +823,9 @@ cp -r $DIRCAP5 $DIRCAP6 to : E9-COLLIDING-TARGETS
|
||||
"""
|
||||
|
||||
class CopyOut(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
FILE_CONTENTS = "file text"
|
||||
FILE_CONTENTS_5 = "5"
|
||||
FILE_CONTENTS_6 = "6"
|
||||
FILE_CONTENTS = b"file text"
|
||||
FILE_CONTENTS_5 = b"5"
|
||||
FILE_CONTENTS_6 = b"6"
|
||||
|
||||
def do_setup(self):
|
||||
# first we build a tahoe filesystem that contains:
|
||||
|
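The PY2-guarded decode above exists because the CLI test harness hands back text on Python 3 but encoded bytes on Python 2, and only the latter needs out.decode(get_io_encoding()). A rough version-agnostic sketch of the same guard, using sys.version_info in place of future.utils.PY2:

# Sketch of the guard; sys.version_info replaces future.utils.PY2 here.
import sys

PY2 = sys.version_info[0] == 2

def as_text(out, io_encoding="utf-8"):
    # On Python 2 the captured CLI output is encoded bytes; on Python 3 it is
    # already text, so decoding it again would fail.
    if PY2:
        return out.decode(io_encoding)
    return out

assert as_text(u"Metallica\n\u00C4rtonwall\n") == u"Metallica\n\u00C4rtonwall\n"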
@@ -176,6 +176,8 @@ PORTED_TEST_MODULES = [
     "allmydata.test.cli.test_alias",
     "allmydata.test.cli.test_backup",
     "allmydata.test.cli.test_backupdb",
+    "allmydata.test.cli.test_check",
+    "allmydata.test.cli.test_cp",
     "allmydata.test.cli.test_create",
     "allmydata.test.cli.test_invite",
     "allmydata.test.cli.test_status",