Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-19 13:07:56 +00:00)

Merge branch '3679.more-cli-tests-python-3' into 3687.cli-tests-python-3
commit e48d85dd8b
newsfragments/3678.minor (0, Normal file)
newsfragments/3679.minor (0, Normal file)
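
The hunks below are routine Python 2 to Python 3 porting of the Tahoe-LAFS CLI code and its tests: map() results are wrapped in list(), urllib.quote becomes urllib.parse.quote (imported as url_quote), HTTP response bodies are decoded from bytes before being handled as text, and literals compared against wire data become bytes literals. A minimal sketch of the same idioms, using illustrative names that are not taken from the repository:

    # Sketch only: the porting idioms used in the hunks below, shown in isolation.
    from urllib.parse import quote as url_quote   # Python 3 home of Python 2's urllib.quote

    def build_uri_url(nodeurl, rootcap, path_segments):
        # Percent-encode the cap, as the CLI code does after the change.
        url = nodeurl + "uri/%s" % url_quote(rootcap)
        # On Python 3, map() returns a lazy iterator; wrap it in list() when
        # the result is stored or indexed (cf. the parseArgs changes).
        segments = list(map(url_quote, path_segments))
        if segments:
            url += "/" + "/".join(segments)
        return url

    def read_text_body(resp):
        # HTTP bodies arrive as bytes on Python 3; decode before string handling
        # (cf. the resp.read().decode() change).
        return resp.read().decode("utf-8")
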
@@ -224,7 +224,7 @@ class CpOptions(FileStoreOptions):
     def parseArgs(self, *args):
         if len(args) < 2:
             raise usage.UsageError("cp requires at least two arguments")
-        self.sources = map(argv_to_unicode, args[:-1])
+        self.sources = list(map(argv_to_unicode, args[:-1]))
         self.destination = argv_to_unicode(args[-1])

     synopsis = "[options] FROM.. TO"
@@ -435,7 +435,7 @@ class CheckOptions(FileStoreOptions):
         ("add-lease", None, "Add/renew lease on all shares."),
         ]
     def parseArgs(self, *locations):
-        self.locations = map(argv_to_unicode, locations)
+        self.locations = list(map(argv_to_unicode, locations))

     synopsis = "[options] [ALIAS:PATH]"
     description = """
@@ -452,7 +452,7 @@ class DeepCheckOptions(FileStoreOptions):
         ("verbose", "v", "Be noisy about what is happening."),
         ]
     def parseArgs(self, *locations):
-        self.locations = map(argv_to_unicode, locations)
+        self.locations = list(map(argv_to_unicode, locations))

     synopsis = "[options] [ALIAS:PATH]"
     description = """

@@ -1,12 +1,15 @@
 from __future__ import print_function

+from past.builtins import unicode
+from six import ensure_str
+
 import os, time
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util import base32
 from allmydata.util.encodingutil import quote_output, is_printable_ascii
-import urllib
+from urllib.parse import quote as url_quote
 import json

 class SlowOperationRunner(object):
@@ -14,7 +17,7 @@ class SlowOperationRunner(object):
     def run(self, options):
        stderr = options.stderr
        self.options = options
-       self.ophandle = ophandle = base32.b2a(os.urandom(16))
+       self.ophandle = ophandle = ensure_str(base32.b2a(os.urandom(16)))
        nodeurl = options['node-url']
        if not nodeurl.endswith("/"):
            nodeurl += "/"
@@ -25,9 +28,10 @@ class SlowOperationRunner(object):
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
+       path = unicode(path, "utf-8")
        if path == '/':
            path = ''
-       url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+       url = nodeurl + "uri/%s" % url_quote(rootcap)
        if path:
            url += "/" + escape_path(path)
        # todo: should it end with a slash?

@@ -1,6 +1,6 @@
 from __future__ import print_function

-import urllib
+from urllib.parse import quote as url_quote
 import json

 # Python 2 compatibility
@@ -34,9 +34,10 @@ def check_location(options, where):
     except UnknownAliasError as e:
         e.display(stderr)
         return 1
+    path = str(path, "utf-8")
     if path == '/':
         path = ''
-    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+    url = nodeurl + "uri/%s" % url_quote(rootcap)
     if path:
         url += "/" + escape_path(path)
     # todo: should it end with a slash?
@@ -52,7 +53,8 @@ def check_location(options, where):
     if resp.status != 200:
         print(format_http_error("ERROR", resp), file=stderr)
         return 1
-    jdata = resp.read()
+    jdata = resp.read().decode()
+
     if options.get("raw"):
         stdout.write(jdata)
         stdout.write("\n")
@@ -139,7 +141,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
         if self.in_error:
             print(quote_output(line, quotemarks=False), file=self.stderr)
             return
-        if line.startswith("ERROR:"):
+        if line.startswith(b"ERROR:"):
             self.in_error = True
             self.streamer.rc = 1
             print(quote_output(line, quotemarks=False), file=self.stderr)
@@ -202,7 +204,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
         if self.in_error:
             print(quote_output(line, quotemarks=False), file=self.stderr)
             return
-        if line.startswith("ERROR:"):
+        if line.startswith(b"ERROR:"):
             self.in_error = True
             self.streamer.rc = 1
             print(quote_output(line, quotemarks=False), file=self.stderr)
@@ -295,9 +297,10 @@ class DeepCheckStreamer(LineOnlyReceiver, object):
         except UnknownAliasError as e:
             e.display(stderr)
             return 1
+        path = str(path, "utf-8")
         if path == '/':
             path = ''
-        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+        url = nodeurl + "uri/%s" % url_quote(rootcap)
         if path:
             url += "/" + escape_path(path)
         # todo: should it end with a slash?
@@ -322,7 +325,7 @@ class DeepCheckStreamer(LineOnlyReceiver, object):
             if not chunk:
                 break
             if self.options["raw"]:
-                stdout.write(chunk)
+                stdout.write(chunk.decode())
             else:
                 output.dataReceived(chunk)
         if not self.options["raw"]:

@@ -1,8 +1,9 @@
 from __future__ import print_function

+from past.builtins import unicode
+
 import os.path
-import urllib
-import json
+from urllib.parse import quote as url_quote
 from collections import defaultdict
 from six.moves import cStringIO as StringIO
 from twisted.python.failure import Failure
@@ -15,6 +16,7 @@ from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abs
 from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, \
     quote_local_unicode_path, to_bytes
 from allmydata.util.assertutil import precondition, _assert
+from allmydata.util import jsonbytes as json


 class MissingSourceError(TahoeError):
@@ -61,8 +63,8 @@ def mkdir(targeturl):

 def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
     url = nodeurl + "/".join(["uri",
-                              urllib.quote(parent_writecap),
-                              urllib.quote(unicode_to_url(name)),
+                              url_quote(parent_writecap),
+                              url_quote(unicode_to_url(name)),
                               ]) + "?t=mkdir"
     resp = do_http("POST", url)
     if resp.status in (200, 201):
@@ -199,7 +201,7 @@ class TahoeFileSource(object):
     def open(self, caps_only):
         if caps_only:
             return StringIO(self.readcap)
-        url = self.nodeurl + "uri/" + urllib.quote(self.readcap)
+        url = self.nodeurl + "uri/" + url_quote(self.readcap)
         return GET_to_file(url)

     def bestcap(self):
@@ -239,7 +241,7 @@ class TahoeDirectorySource(object):
         self.writecap = writecap
         self.readcap = readcap
         bestcap = writecap or readcap
-        url = self.nodeurl + "uri/%s" % urllib.quote(bestcap)
+        url = self.nodeurl + "uri/%s" % url_quote(bestcap)
         resp = do_http("GET", url + "?t=json")
         if resp.status != 200:
             raise HTTPError("Error examining source directory", resp)
@@ -249,7 +251,7 @@ class TahoeDirectorySource(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def init_from_parsed(self, parsed):
@@ -259,7 +261,7 @@ class TahoeDirectorySource(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def populate(self, recurse):
@@ -329,14 +331,14 @@ class TahoeDirectoryTarget(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def init_from_grid(self, writecap, readcap):
         self.writecap = writecap
         self.readcap = readcap
         bestcap = writecap or readcap
-        url = self.nodeurl + "uri/%s" % urllib.quote(bestcap)
+        url = self.nodeurl + "uri/%s" % url_quote(bestcap)
         resp = do_http("GET", url + "?t=json")
         if resp.status != 200:
             raise HTTPError("Error examining target directory", resp)
@@ -346,7 +348,7 @@ class TahoeDirectoryTarget(object):
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in d["children"].items()] )
         self.children = None

     def just_created(self, writecap):
@@ -370,8 +372,8 @@ class TahoeDirectoryTarget(object):
                 url = None
                 if self.writecap:
                     url = self.nodeurl + "/".join(["uri",
-                                                   urllib.quote(self.writecap),
-                                                   urllib.quote(unicode_to_url(name))])
+                                                   url_quote(self.writecap),
+                                                   url_quote(unicode_to_url(name))])
                 self.children[name] = TahoeFileTarget(self.nodeurl, mutable,
                                                       writecap, readcap, url)
             elif data[0] == "dirnode":
@@ -439,7 +441,7 @@ class TahoeDirectoryTarget(object):
     def set_children(self):
         if not self.new_children:
             return
-        url = (self.nodeurl + "uri/" + urllib.quote(self.writecap)
+        url = (self.nodeurl + "uri/" + url_quote(self.writecap)
                + "?t=set_children")
         set_data = {}
         for (name, filecap) in self.new_children.items():
@@ -450,7 +452,7 @@ class TahoeDirectoryTarget(object):
             # TODO: think about how this affects forward-compatibility for
             # unknown caps
             set_data[name] = ["filenode", {"rw_uri": filecap}]
-        body = json.dumps(set_data)
+        body = json.dumps_bytes(set_data)
         POST(url, body)

 FileSources = (LocalFileSource, TahoeFileSource)
@@ -603,7 +605,7 @@ class Copier(object):
                 t = LocalFileTarget(pathname) # non-empty
         else:
             # this is a tahoe object
-            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
+            url = self.nodeurl + "uri/%s" % url_quote(rootcap)
             if path:
                 url += "/" + escape_path(path)

@@ -656,7 +658,7 @@ class Copier(object):
                 t = LocalFileSource(pathname, name) # non-empty
         else:
             # this is a tahoe object
-            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
+            url = self.nodeurl + "uri/%s" % url_quote(rootcap)
             name = None
             if path:
                 if path.endswith("/"):

@@ -45,10 +45,10 @@ def list(options):
             return resp.status

     data = resp.read()

     if options['json']:
         # The webapi server should always output printable ASCII.
         if is_printable_ascii(data):
+            data = unicode(data, "ascii")
             print(data, file=stdout)
             return 0
         else:

@@ -1,6 +1,9 @@
 from __future__ import print_function

-import urllib, json
+from past.builtins import unicode
+
+from urllib.parse import quote as url_quote
+import json
 from twisted.protocols.basic import LineOnlyReceiver
 from allmydata.util.abbreviate import abbreviate_space_both
 from allmydata.scripts.slow_operation import SlowOperationRunner
@@ -33,9 +36,10 @@ class ManifestStreamer(LineOnlyReceiver, object):
         except UnknownAliasError as e:
             e.display(stderr)
             return 1
+        path = unicode(path, "utf-8")
         if path == '/':
             path = ''
-        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+        url = nodeurl + "uri/%s" % url_quote(rootcap)
         if path:
             url += "/" + escape_path(path)
         # todo: should it end with a slash?
@@ -63,7 +67,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
         if self.in_error:
             print(quote_output(line, quotemarks=False), file=stderr)
             return
-        if line.startswith("ERROR:"):
+        if line.startswith(b"ERROR:"):
             self.in_error = True
             self.rc = 1
             print(quote_output(line, quotemarks=False), file=stderr)

@@ -1,6 +1,8 @@
 from __future__ import print_function

-import urllib
+from past.builtins import unicode
+
+from urllib.parse import quote as url_quote
 from allmydata.scripts.common_http import do_http, check_http_error
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, UnknownAliasError
 from allmydata.util.encodingutil import quote_output
@@ -24,7 +26,7 @@ def mkdir(options):
         # create a new unlinked directory
         url = nodeurl + "uri?t=mkdir"
         if options["format"]:
-            url += "&format=%s" % urllib.quote(options['format'])
+            url += "&format=%s" % url_quote(options['format'])
         resp = do_http("POST", url)
         rc = check_http_error(resp, stderr)
         if rc:
@@ -35,13 +37,14 @@ def mkdir(options):
         return 0

     # create a new directory at the given location
+    path = unicode(path, "utf-8")
    if path.endswith("/"):
        path = path[:-1]
    # path must be "/".join([s.encode("utf-8") for s in segments])
-   url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap),
-                                          urllib.quote(path))
+   url = nodeurl + "uri/%s/%s?t=mkdir" % (url_quote(rootcap),
+                                          url_quote(path))
    if options['format']:
-       url += "&format=%s" % urllib.quote(options['format'])
+       url += "&format=%s" % url_quote(options['format'])

    resp = do_http("POST", url)
    check_http_error(resp, stderr)

@@ -1,7 +1,7 @@
 from __future__ import print_function

 import re
-import urllib
+from urllib.parse import quote as url_quote
 import json
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
@@ -25,7 +25,7 @@ def mv(options, mode="move"):
     except UnknownAliasError as e:
         e.display(stderr)
         return 1
-    from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+    from_url = nodeurl + "uri/%s" % url_quote(rootcap)
     if from_path:
         from_url += "/" + escape_path(from_path)
     # figure out the source cap
@@ -43,7 +43,7 @@ def mv(options, mode="move"):
     except UnknownAliasError as e:
         e.display(stderr)
         return 1
-    to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+    to_url = nodeurl + "uri/%s" % url_quote(rootcap)
     if path:
         to_url += "/" + escape_path(path)

@@ -1,7 +1,9 @@
 from __future__ import print_function

+from past.builtins import unicode
+
 from six.moves import cStringIO as StringIO
-import urllib
+from urllib.parse import quote as url_quote

 from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
@@ -46,19 +48,20 @@ def put(options):

     # FIXME: don't hardcode cap format.
     if to_file.startswith("URI:MDMF:") or to_file.startswith("URI:SSK:"):
-        url = nodeurl + "uri/%s" % urllib.quote(to_file)
+        url = nodeurl + "uri/%s" % url_quote(to_file)
     else:
         try:
             rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
         except UnknownAliasError as e:
             e.display(stderr)
             return 1
+        path = unicode(path, "utf-8")
         if path.startswith("/"):
             suggestion = to_file.replace(u"/", u"", 1)
             print("Error: The remote filename must not start with a slash", file=stderr)
             print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr)
             return 1
-        url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
+        url = nodeurl + "uri/%s/" % url_quote(rootcap)
         if path:
             url += escape_path(path)
         else:

@@ -1,3 +1,13 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from six import ensure_text
+
 import os.path
 import json
 from twisted.trial import unittest
@@ -5,20 +15,21 @@ from six.moves import cStringIO as StringIO

 from allmydata import uri
 from allmydata.util import base32
-from allmydata.util.encodingutil import quote_output, to_bytes
+from allmydata.util.encodingutil import to_bytes
 from allmydata.mutable.publish import MutableData
 from allmydata.immutable import upload
 from allmydata.scripts import debug
 from ..no_network import GridTestMixin
 from .common import CLITestMixin

+
 class Check(GridTestMixin, CLITestMixin, unittest.TestCase):

     def test_check(self):
         self.basedir = "cli/Check/check"
         self.set_up_grid()
         c0 = self.g.clients[0]
-        DATA = "data" * 100
+        DATA = b"data" * 100
         DATA_uploadable = MutableData(DATA)
         d = c0.create_mutable_file(DATA_uploadable)
         def _stash_uri(n):
@@ -28,7 +39,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", self.uri))
         def _check1(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: Healthy" in lines, out)
@@ -38,14 +49,14 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", "--raw", self.uri))
         def _check2(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             data = json.loads(out)
-            self.failUnlessReallyEqual(to_bytes(data["summary"]), "Healthy")
+            self.failUnlessReallyEqual(to_bytes(data["summary"]), b"Healthy")
             self.failUnlessReallyEqual(data["results"]["healthy"], True)
         d.addCallback(_check2)

-        d.addCallback(lambda ign: c0.upload(upload.Data("literal", convergence="")))
+        d.addCallback(lambda ign: c0.upload(upload.Data(b"literal", convergence=b"")))
         def _stash_lit_uri(n):
             self.lit_uri = n.get_uri()
         d.addCallback(_stash_lit_uri)
@@ -53,7 +64,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", self.lit_uri))
         def _check_lit(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: Healthy (LIT)" in lines, out)
@@ -62,13 +73,13 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", "--raw", self.lit_uri))
         def _check_lit_raw(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             data = json.loads(out)
             self.failUnlessReallyEqual(data["results"]["healthy"], True)
         d.addCallback(_check_lit_raw)

-        d.addCallback(lambda ign: c0.create_immutable_dirnode({}, convergence=""))
+        d.addCallback(lambda ign: c0.create_immutable_dirnode({}, convergence=b""))
         def _stash_lit_dir_uri(n):
             self.lit_dir_uri = n.get_uri()
         d.addCallback(_stash_lit_dir_uri)
@@ -89,16 +100,16 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             cso.parseOptions([shares[1][2]])
             storage_index = uri.from_string(self.uri).get_storage_index()
             self._corrupt_share_line = " server %s, SI %s, shnum %d" % \
-                                       (base32.b2a(shares[1][1]),
-                                        base32.b2a(storage_index),
+                                       (str(base32.b2a(shares[1][1]), "ascii"),
+                                        str(base32.b2a(storage_index), "ascii"),
                                         shares[1][0])
             debug.corrupt_share(cso)
         d.addCallback(_clobber_shares)

         d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri))
         def _check3(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             summary = [l for l in lines if l.startswith("Summary")][0]
@@ -112,7 +123,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("check", "--verify", "--raw", self.uri))
         def _check3_raw(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             data = json.loads(out)
             self.failUnlessReallyEqual(data["results"]["healthy"], False)
@@ -126,7 +137,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli("check", "--verify", "--repair", self.uri))
         def _check4(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: not healthy" in lines, out)
@@ -140,7 +151,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli("check", "--verify", "--repair", self.uri))
         def _check5(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("Summary: healthy" in lines, out)
@@ -156,14 +167,14 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         c0 = self.g.clients[0]
         self.uris = {}
         self.fileurls = {}
-        DATA = "data" * 100
-        quoted_good = quote_output(u"g\u00F6\u00F6d")
+        DATA = b"data" * 100
+        quoted_good = u"'g\u00F6\u00F6d'"

         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n
             self.rooturi = n.get_uri()
-            return n.add_file(u"g\u00F6\u00F6d", upload.Data(DATA, convergence=""))
+            return n.add_file(u"g\u00F6\u00F6d", upload.Data(DATA, convergence=b""))
         d.addCallback(_stash_root_and_create_file)
         def _stash_uri(fn, which):
             self.uris[which] = fn.get_uri()
@@ -171,18 +182,18 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_uri, u"g\u00F6\u00F6d")
         d.addCallback(lambda ign:
                       self.rootnode.add_file(u"small",
-                                             upload.Data("literal",
-                                                         convergence="")))
+                                             upload.Data(b"literal",
+                                                         convergence=b"")))
         d.addCallback(_stash_uri, "small")
         d.addCallback(lambda ign:
-                      c0.create_mutable_file(MutableData(DATA+"1")))
+                      c0.create_mutable_file(MutableData(DATA+b"1")))
         d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
         d.addCallback(_stash_uri, "mutable")

         d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
         def _check1(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
@@ -198,8 +209,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.rooturi))
         def _check2(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': Healthy" in lines, out)
             self.failUnless("'small': Healthy (LIT)" in lines, out)
@@ -212,7 +224,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.do_cli("stats", self.rooturi))
         def _check_stats(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             self.failUnlessIn(" count-immutable-files: 1", lines)
@@ -236,8 +248,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             cso.parseOptions([shares[1][2]])
             storage_index = uri.from_string(self.uris["mutable"]).get_storage_index()
             self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \
-                                       (base32.b2a(shares[1][1]),
-                                        base32.b2a(storage_index),
+                                       (str(base32.b2a(shares[1][1]), "ascii"),
+                                        str(base32.b2a(storage_index), "ascii"),
                                         shares[1][0])
             debug.corrupt_share(cso)
         d.addCallback(_clobber_shares)
@@ -251,8 +263,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli("deep-check", "--verbose", self.rooturi))
         def _check3(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': Healthy" in lines, out)
             self.failUnless("'small': Healthy (LIT)" in lines, out)
@@ -268,8 +281,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.rooturi))
         def _check4(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': Healthy" in lines, out)
             self.failUnless("'small': Healthy (LIT)" in lines, out)
@@ -287,7 +301,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.rooturi))
         def _check5(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
             units = [json.loads(line) for line in lines]
@@ -301,8 +315,9 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.rooturi))
         def _check6(args):
             (rc, out, err) = args
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             self.failUnlessReallyEqual(rc, 0)
+            out = ensure_text(out)
             lines = out.splitlines()
             self.failUnless("'<root>': healthy" in lines, out)
             self.failUnless("'small': healthy" in lines, out)
@@ -322,10 +337,10 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"subdir"))
         d.addCallback(_stash_uri, "subdir")
         d.addCallback(lambda fn:
-                      fn.add_file(u"subfile", upload.Data(DATA+"2", "")))
+                      fn.add_file(u"subfile", upload.Data(DATA+b"2", b"")))
         d.addCallback(lambda ign:
                       self.delete_shares_numbered(self.uris["subdir"],
-                                                  range(10)))
+                                                  list(range(10))))

         # root
         # rootg\u00F6\u00F6d/
@@ -340,7 +355,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failIfEqual(rc, 0)
             self.failUnlessIn("ERROR: UnrecoverableFileError", err)
             # the fatal directory should still show up, as the last line
-            self.failUnlessIn(" subdir\n", out)
+            self.failUnlessIn(" subdir\n", ensure_text(out))
         d.addCallback(_manifest_failed)

         d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
@@ -379,7 +394,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
-            self.failUnlessReallyEqual(out, "")
+            self.assertEqual(len(out), 0, out)
         d.addCallback(_check)
         d.addCallback(lambda ign: self.do_cli("deep-check"))
         d.addCallback(_check)
@@ -396,7 +411,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
-            self.failUnlessReallyEqual(out, "")
+            self.assertEqual(len(out), 0, out)
         d.addCallback(_check)
         return d

@@ -416,10 +431,10 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         def _check(args):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 0)
-            self.failUnlessReallyEqual(err, "")
+            self.assertEqual(len(err), 0, err)
             #Ensure healthy appears for each uri
-            self.failUnlessIn("Healthy", out[:len(out)/2])
-            self.failUnlessIn("Healthy", out[len(out)/2:])
+            self.failUnlessIn("Healthy", out[:len(out)//2])
+            self.failUnlessIn("Healthy", out[len(out)//2:])
         d.addCallback(_check)

         d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], "nonexistent:"))

@@ -1,4 +1,14 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import print_function
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import unicode_literals

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os.path, json
 from twisted.trial import unittest
@@ -24,12 +34,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
     def test_unicode_filename(self):
         self.basedir = "cli/Cp/unicode_filename"

-        fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
-        try:
-            fn1_arg = fn1.encode(get_io_encoding())
-            artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
-        except UnicodeEncodeError:
-            raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
+        fn1 = os.path.join(self.basedir, u"\u00C4rtonwall")
+        artonwall_arg = u"\u00C4rtonwall"

         skip_if_cannot_represent_filename(fn1)

@@ -44,15 +50,15 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):

         d = self.do_cli("create-alias", "tahoe")

-        d.addCallback(lambda res: self.do_cli("cp", fn1_arg, "tahoe:"))
+        d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))

         d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg))
-        d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA1))
+        d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA1))

         d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))

         d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
-        d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA2))
+        d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA2))

         d.addCallback(lambda res: self.do_cli("ls", "tahoe:"))
         def _check(args):
@@ -66,8 +72,10 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
                 self.failUnlessIn("files whose names could not be converted", err)
             else:
                 self.failUnlessReallyEqual(rc, 0)
-                self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"Metallica\n\u00C4rtonwall\n")
-                self.failUnlessReallyEqual(err, "")
+                if PY2:
+                    out = out.decode(get_io_encoding())
+                self.failUnlessReallyEqual(out, u"Metallica\n\u00C4rtonwall\n")
+                self.assertEqual(len(err), 0, err)
         d.addCallback(_check)

         return d
@@ -98,7 +106,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         fn1 = os.path.join(self.basedir, "Metallica")
         fn2 = os.path.join(outdir, "Not Metallica")
         fn3 = os.path.join(outdir, "test2")
-        DATA1 = "puppies" * 10000
+        DATA1 = b"puppies" * 10000
         fileutil.write(fn1, DATA1)

         d = self.do_cli("create-alias", "tahoe")
@@ -128,7 +136,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("when copying into a directory, all source files must have names, but",
                               err)
-            self.failUnlessReallyEqual(out, "")
+            self.assertEqual(len(out), 0, out)
         d.addCallback(_resp)

         # Create a directory, linked at tahoe:test .
@@ -200,13 +208,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
     def test_unicode_dirnames(self):
         self.basedir = "cli/Cp/unicode_dirnames"

-        fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
-        try:
-            fn1_arg = fn1.encode(get_io_encoding())
-            del fn1_arg # hush pyflakes
-            artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
-        except UnicodeEncodeError:
-            raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
+        fn1 = os.path.join(self.basedir, u"\u00C4rtonwall")
+        artonwall_arg = u"\u00C4rtonwall"

         skip_if_cannot_represent_filename(fn1)

@@ -222,13 +225,15 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
                 unicode_to_output(u"\u00C4rtonwall")
             except UnicodeEncodeError:
                 self.failUnlessReallyEqual(rc, 1)
-                self.failUnlessReallyEqual(out, "")
+                self.assertEqual(len(out), 0, out)
                 self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
                 self.failUnlessIn("files whose names could not be converted", err)
             else:
                 self.failUnlessReallyEqual(rc, 0)
-                self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"\u00C4rtonwall\n")
-                self.failUnlessReallyEqual(err, "")
+                if PY2:
+                    out = out.decode(get_io_encoding())
+                self.failUnlessReallyEqual(out, u"\u00C4rtonwall\n")
+                self.assertEqual(len(err), 0, err)
         d.addCallback(_check)

         return d
@@ -818,9 +823,9 @@ cp -r $DIRCAP5 $DIRCAP6 to : E9-COLLIDING-TARGETS
 """

 class CopyOut(GridTestMixin, CLITestMixin, unittest.TestCase):
-    FILE_CONTENTS = "file text"
-    FILE_CONTENTS_5 = "5"
-    FILE_CONTENTS_6 = "6"
+    FILE_CONTENTS = b"file text"
+    FILE_CONTENTS_5 = b"5"
+    FILE_CONTENTS_6 = b"6"

     def do_setup(self):
         # first we build a tahoe filesystem that contains:

@@ -176,6 +176,8 @@ PORTED_TEST_MODULES = [
     "allmydata.test.cli.test_alias",
     "allmydata.test.cli.test_backup",
     "allmydata.test.cli.test_backupdb",
+    "allmydata.test.cli.test_check",
+    "allmydata.test.cli.test_cp",
     "allmydata.test.cli.test_create",
     "allmydata.test.cli.test_invite",
    "allmydata.test.cli.test_status",