Some progress towards passing backup tests on Python 3.

parent 9a17c4a5d2
commit d6406d5edb
@@ -351,7 +351,7 @@ class BackupOptions(FileStoreOptions):
         line. The file is assumed to be in the argv encoding."""
         abs_filepath = argv_to_abspath(filepath)
         try:
-            exclude_file = file(abs_filepath)
+            exclude_file = open(abs_filepath)
         except:
             raise BackupConfigurationError('Error opening exclude file %s.' % quote_local_unicode_path(abs_filepath))
         try:
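
The file() builtin exists only on Python 2; open() behaves the same way there and is the only spelling available on Python 3, where file() raises NameError. A minimal standalone sketch of the call the patched code now makes (the excludes file below is a made-up example, not anything from the Tahoe-LAFS tree):

# Sketch: open() works on both Python 2 and 3; file() does not exist on Python 3.
import tempfile

def read_excludes(abs_filepath):
    # Same kind of call the patched BackupOptions code now makes.
    with open(abs_filepath) as exclude_file:
        return [line.rstrip("\n") for line in exclude_file if line.strip()]

# Throwaway excludes file purely for illustration.
with tempfile.NamedTemporaryFile("w", suffix=".dummy", delete=False) as f:
    f.write("*.pyc\n.git\n")

print(read_excludes(f.name))  # ['*.pyc', '.git']
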
@@ -1,7 +1,7 @@
 # coding: utf-8
 
 from __future__ import print_function
-from six import ensure_str
+from six import ensure_binary
 
 import os, sys, textwrap
 import codecs
@@ -22,6 +22,8 @@ from yaml import (
 from future.utils import PY2
 if PY2:
     from future.builtins import str # noqa: F401
+else:
+    from typing import Union
 
 from twisted.python import usage
 
@@ -274,18 +276,17 @@ def get_alias(aliases, path_unicode, default):
     return uri.from_string_dirnode(aliases[alias]).to_string(), path[colon+1:]
 
 def escape_path(path):
-    # type: (str) -> str
+    # type: (Union[str,bytes]) -> bytes
     u"""
     Return path quoted to US-ASCII, valid URL characters.
 
     >>> path = u'/føö/bar/☃'
     >>> escaped = escape_path(path)
-    >>> str(escaped)
-    '/f%C3%B8%C3%B6/bar/%E2%98%83'
-    >>> escaped.encode('ascii').decode('ascii') == escaped
-    True
+    >>> escaped
+    b'/f%C3%B8%C3%B6/bar/%E2%98%83'
     """
-    segments = path.split("/")
-    result = "/".join([urllib.parse.quote(unicode_to_url(s)) for s in segments])
-    result = ensure_str(result, "ascii")
-    return result
+    if isinstance(path, str):
+        path = path.encode("utf-8")
+    segments = path.split(b"/")
+    return b"/".join([urllib.parse.quote(s).encode("ascii") for s in segments])
+
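
The rewritten escape_path accepts either str or bytes, normalises str input to UTF-8 bytes, and returns percent-quoted bytes; the old unicode_to_url helper is no longer involved. A standalone sketch of the same logic, checked against the doctest values from the diff:

# Standalone re-statement of the new escape_path behaviour shown in the diff.
from urllib.parse import quote

def escape_path_sketch(path):
    # str input is UTF-8 encoded first, so both input types take the same route.
    if isinstance(path, str):
        path = path.encode("utf-8")
    segments = path.split(b"/")
    # Each segment is percent-quoted (quote accepts bytes and returns str),
    # then re-encoded to ASCII bytes and rejoined with b"/".
    return b"/".join([quote(s).encode("ascii") for s in segments])

assert escape_path_sketch(u'/føö/bar/☃') == b'/f%C3%B8%C3%B6/bar/%E2%98%83'
assert escape_path_sketch(b'/plain/ascii') == b'/plain/ascii'
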
@@ -2,7 +2,7 @@ from __future__ import print_function
 
 import os.path
 import time
-import urllib
+from urllib.parse import quote as url_quote
 import json
 import datetime
 from allmydata.scripts.common import get_alias, escape_path, DEFAULT_ALIAS, \
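
Python 2's urllib.quote lives at urllib.parse.quote on Python 3; importing it under the short alias url_quote keeps the call sites in the hunks below almost unchanged. A rough illustration with placeholder values (the node URL and cap here are made up, not real Tahoe-LAFS data):

# Illustration only; nodeurl and rootcap are placeholder values.
from urllib.parse import quote as url_quote

nodeurl = "http://127.0.0.1:3456/"
rootcap = "URI:DIR2:example:example"
to_url = nodeurl + "uri/%s/" % url_quote(rootcap)
print(to_url)  # colons are percent-encoded: .../uri/URI%3ADIR2%3Aexample%3Aexample/
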
@@ -52,7 +52,7 @@ def mkdir(contents, options):
 
 def put_child(dirurl, childname, childcap):
     assert dirurl[-1] != "/"
-    url = dirurl + "/" + urllib.quote(unicode_to_url(childname)) + "?t=uri"
+    url = dirurl + "/" + url_quote(unicode_to_url(childname)) + "?t=uri"
     resp = do_http("PUT", url, childcap)
     if resp.status not in (200, 201):
         raise HTTPError("Error during put_child", resp)
@@ -97,7 +97,7 @@ class BackerUpper(object):
         except UnknownAliasError as e:
             e.display(stderr)
             return 1
-        to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
+        to_url = nodeurl + "uri/%s/" % url_quote(rootcap)
         if path:
             to_url += escape_path(path)
         if not to_url.endswith("/"):
@@ -192,7 +192,7 @@ class BackerUpper(object):
         filecap = r.was_uploaded()
         self.verboseprint("checking %s" % quote_output(filecap))
         nodeurl = self.options['node-url']
-        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(filecap)
+        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % url_quote(filecap)
         self._files_checked += 1
         resp = do_http("POST", checkurl)
         if resp.status != 200:
@@ -225,7 +225,7 @@ class BackerUpper(object):
         dircap = r.was_created()
         self.verboseprint("checking %s" % quote_output(dircap))
         nodeurl = self.options['node-url']
-        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(dircap)
+        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % url_quote(dircap)
         self._directories_checked += 1
         resp = do_http("POST", checkurl)
         if resp.status != 200:

@@ -1,3 +1,9 @@
+from future.utils import PY2
+if PY2:
+    import __builtin__ as builtins
+else:
+    import builtins
+
 import os.path
 from six.moves import cStringIO as StringIO
 from datetime import timedelta
@@ -6,7 +12,6 @@ import re
 from twisted.trial import unittest
 from twisted.python.monkey import MonkeyPatcher
 
-import __builtin__
 from allmydata.util import fileutil
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import get_io_encoding, unicode_to_argv
@@ -407,12 +412,16 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
 
         ns = Namespace()
         ns.called = False
+        original_open = open
         def call_file(name, *args):
-            ns.called = True
-            self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
-            return StringIO()
+            if name.endswith("excludes.dummy"):
+                ns.called = True
+                self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
+                return StringIO()
+            else:
+                return original_open(name, *args)
 
-        patcher = MonkeyPatcher((__builtin__, 'file', call_file))
+        patcher = MonkeyPatcher((builtins, 'open', call_file))
         patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from', unicode_to_argv(exclude_file), 'from', 'to'])
         self.failUnless(ns.called)
 
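
Since Python 3 has no file() builtin to intercept, the test now monkey-patches builtins.open and only diverts the path it recognises, passing everything else to the real open(). A simplified sketch of the same patching pattern outside the test suite (the file contents below are made up):

# Sketch of the patching pattern used by the updated test.
import builtins
from io import StringIO
from twisted.python.monkey import MonkeyPatcher

original_open = open  # capture the real builtin before patching

def call_file(name, *args):
    # Divert only the fake excludes file; everything else uses the real open().
    if name.endswith("excludes.dummy"):
        return StringIO("*.pyc\n")
    return original_open(name, *args)

def read_first_line(path):
    with open(path) as f:  # resolves to the patched builtins.open while patched
        return f.readline().strip()

patcher = MonkeyPatcher((builtins, 'open', call_file))
print(patcher.runWithPatches(read_first_line, "excludes.dummy"))  # -> *.pyc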