Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-23 14:52:26 +00:00)

Merge pull request #1361 from a-detiste/remove-future

Remove most of the usage of future. Fixes ticket:4093

This commit is contained in: commit 432e041d9d
Changed files:

misc/coding_tools
newsfragments
src/allmydata
    _monkeypatch.py
    check_results.py
    uri.py
    crypto
    frontends
    immutable
    interfaces.py
    introducer
    node.py
    scripts
        default_nodedir.py
        runner.py
        slow_operation.py
        tahoe_check.py
        tahoe_get.py
        tahoe_manifest.py
        tahoe_status.py
    storage
    test
        cli
        common_util.py
        mutable
        storage_plugin.py
        test_common_util.py
        test_crawler.py
        test_crypto.py
        test_dirnode.py
        test_download.py
        test_encodingutil.py
        test_humanreadable.py
        test_i2p_provider.py
        test_runner.py
        test_spans.py
        test_statistics.py
        test_storage_web.py
        test_time_format.py
        test_tor_provider.py
        test_util.py
        web
    util
    web
    windows
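The hunks below repeat the same Python 3-only cleanups: `six`, `future`, and `past` compatibility shims are swapped for their standard-library equivalents. A minimal illustrative sketch of the recurring substitutions (not taken from any single file in this diff):

from io import StringIO          # replaces six.moves StringIO / cStringIO

buf = StringIO()
buf.write("text is always str on Python 3")
assert isinstance(buf.getvalue(), str)      # no separate unicode type
assert isinstance(3162277660169, int)       # no separate long type
assert bytes([0x41]) == b"A"                # replaces future.utils.bchr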
@@ -24,7 +24,7 @@
 import os, sys, subprocess, json, tempfile, zipfile, re, itertools
 import email.parser
 from pprint import pprint
-from six.moves import StringIO
+from io import StringIO
 import click
 
 all_packages = {} # name -> version
newsfragments/4093.minor (0 lines, new normal file)
@@ -4,13 +4,5 @@ Monkey-patching of third party libraries.
 Ported to Python 3.
 """
 
-from future.utils import PY2
-
-
-
 def patch():
     """Path third-party libraries to make Tahoe-LAFS work."""
-
-    if not PY2:
-        # Python 3 doesn't need to monkey patch Foolscap
-        return
@@ -1,8 +1,6 @@
 """Ported to Python 3.
 """
 
-from past.builtins import unicode
-
 from zope.interface import implementer
 from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
     IDeepCheckResults, IDeepCheckAndRepairResults, IURI, IDisplayableServer

@@ -63,8 +61,8 @@ class CheckResults(object):
         # On Python 2, we can mix bytes and Unicode. On Python 3, we want
         # unicode.
         if isinstance(summary, bytes):
-            summary = unicode(summary, "utf-8")
-        assert isinstance(summary, unicode) # should be a single string
+            summary = str(summary, "utf-8")
+        assert isinstance(summary, str) # should be a single string
         self._summary = summary
         assert not isinstance(report, str) # should be list of strings
         self._report = report
@@ -10,8 +10,6 @@ objects that `cryptography` documents.
 Ported to Python 3.
 """
 
-import six
-
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.ciphers import (
     Cipher,

@@ -79,7 +77,7 @@ def encrypt_data(encryptor, plaintext):
     """
 
     _validate_cryptor(encryptor, encrypt=True)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')
 
     return encryptor.update(plaintext)

@@ -118,7 +116,7 @@ def decrypt_data(decryptor, plaintext):
     """
 
     _validate_cryptor(decryptor, encrypt=False)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')
 
     return decryptor.update(plaintext)

@@ -160,7 +158,7 @@ def _validate_key(key):
     """
     confirm `key` is suitable for AES encryption, or raise ValueError
     """
-    if not isinstance(key, six.binary_type):
+    if not isinstance(key, bytes):
        raise TypeError('Key must be bytes')
    if len(key) not in (16, 32):
        raise ValueError('Key must be 16 or 32 bytes long')

@@ -175,7 +173,7 @@ def _validate_iv(iv):
     """
     if iv is None:
         return DEFAULT_IV
-    if not isinstance(iv, six.binary_type):
+    if not isinstance(iv, bytes):
        raise TypeError('IV must be bytes')
    if len(iv) != 16:
        raise ValueError('IV must be 16 bytes long')
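On Python 3, `six.binary_type` is simply an alias for `bytes`, so the argument checks above keep their behaviour with the builtin. A standalone sketch of the same guard (hypothetical helper, not the Tahoe API):

def _require_bytes(value, what):
    # Same check as the old `isinstance(value, six.binary_type)` guard.
    if not isinstance(value, bytes):
        raise TypeError('%s must be bytes' % (what,))
    return value

_require_bytes(b'\x00' * 16, 'Key')        # accepted: 16-byte AES key
try:
    _require_bytes(u'0123456789abcdef', 'Key')
except TypeError as e:
    print(e)                               # Key must be bytes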
@@ -45,9 +45,6 @@ noisy = True
 from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \
      msg as logmsg, PrefixingLogMixin
 
-if six.PY3:
-    long = int
-
 
 def createSFTPError(errorCode, errorMessage):
     """
@@ -4,8 +4,6 @@ Ported to Python 3.
 
 from __future__ import annotations
 
-from future.utils import native_str
-from past.builtins import long, unicode
 from six import ensure_str
 
 import os, time, weakref, itertools

@@ -57,7 +55,7 @@ from eliot import (
 
 _TOTAL_SHARES = Field.for_types(
     u"total_shares",
-    [int, long],
+    [int],
     u"The total number of shares desired.",
 )
 

@@ -104,7 +102,7 @@ _HAPPINESS_MAPPINGS = Field(
 
 _HAPPINESS = Field.for_types(
     u"happiness",
-    [int, long],
+    [int],
     u"The computed happiness of a certain placement.",
 )
 

@@ -142,7 +140,7 @@ GET_SHARE_PLACEMENTS = MessageType(
 
 _EFFECTIVE_HAPPINESS = Field.for_types(
     u"effective_happiness",
-    [int, long],
+    [int],
     u"The computed happiness value of a share placement map.",
 )
 

@@ -166,7 +164,7 @@ class HelperUploadResults(Copyable, RemoteCopy):
     # package/module/class name
     #
     # Needs to be native string to make Foolscap happy.
-    typeToCopy = native_str("allmydata.upload.UploadResults.tahoe.allmydata.com")
+    typeToCopy = "allmydata.upload.UploadResults.tahoe.allmydata.com"
     copytype = typeToCopy
 
     # also, think twice about changing the shape of any existing attribute,

@@ -1622,7 +1620,7 @@ class AssistedUploader(object):
         # abbreviated), so if we detect old results, just clobber them.
 
         sharemap = upload_results.sharemap
-        if any(isinstance(v, (bytes, unicode)) for v in sharemap.values()):
+        if any(isinstance(v, (bytes, str)) for v in sharemap.values()):
             upload_results.sharemap = None
 
     def _build_verifycap(self, helper_upload_results):

@@ -1701,7 +1699,7 @@ class BaseUploadable(object):
     def set_default_encoding_parameters(self, default_params):
         assert isinstance(default_params, dict)
         for k,v in default_params.items():
-            precondition(isinstance(k, (bytes, unicode)), k, v)
+            precondition(isinstance(k, (bytes, str)), k, v)
             precondition(isinstance(v, int), k, v)
         if "k" in default_params:
             self.default_encoding_param_k = default_params["k"]
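Collapsing `[int, long]` to `[int]` in the Eliot field declarations is safe because Python 3 has a single unbounded integer type; a small sketch using the same `Field.for_types` call seen above (assumes eliot is installed):

from eliot import Field

_HAPPINESS = Field.for_types(
    u"happiness",
    [int],   # Python 3 ints are unbounded, so old "long" values are still ints
    u"The computed happiness of a certain placement.",
)

assert isinstance(10 ** 40, int)   # would have been a long on Python 2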
@@ -6,9 +6,6 @@ Ported to Python 3.
 Note that for RemoteInterfaces, the __remote_name__ needs to be a native string because of https://github.com/warner/foolscap/blob/43f4485a42c9c28e2c79d655b3a9e24d4e6360ca/src/foolscap/remoteinterface.py#L67
 """
 
-from future.utils import native_str
-
-from past.builtins import long
 from typing import Dict
 
 from zope.interface import Interface, Attribute

@@ -112,7 +109,7 @@ ReadData = ListOf(ShareData)
 
 
 class RIStorageServer(RemoteInterface):
-    __remote_name__ = native_str("RIStorageServer.tahoe.allmydata.com")
+    __remote_name__ = "RIStorageServer.tahoe.allmydata.com"
 
     def get_version():
         """

@@ -2768,13 +2765,13 @@ UploadResults = Any() #DictOf(bytes, bytes)
 
 
 class RIEncryptedUploadable(RemoteInterface):
-    __remote_name__ = native_str("RIEncryptedUploadable.tahoe.allmydata.com")
+    __remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com"
 
     def get_size():
         return Offset
 
     def get_all_encoding_parameters():
-        return (int, int, int, long)
+        return (int, int, int, int)
 
     def read_encrypted(offset=Offset, length=ReadSize):
         return ListOf(bytes)

@@ -2784,7 +2781,7 @@ class RIEncryptedUploadable(RemoteInterface):
 
 
 class RICHKUploadHelper(RemoteInterface):
-    __remote_name__ = native_str("RIUploadHelper.tahoe.allmydata.com")
+    __remote_name__ = "RIUploadHelper.tahoe.allmydata.com"
 
     def get_version():
         """

@@ -2797,7 +2794,7 @@ class RICHKUploadHelper(RemoteInterface):
 
 
 class RIHelper(RemoteInterface):
-    __remote_name__ = native_str("RIHelper.tahoe.allmydata.com")
+    __remote_name__ = "RIHelper.tahoe.allmydata.com"
 
     def get_version():
         """
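`future.utils.native_str` is just `str` on Python 3, so wrapping `__remote_name__` adds nothing; a minimal sketch of the now-plain declaration (illustrative interface, assumes foolscap is installed):

from foolscap.api import Any, RemoteInterface

class RIExample(RemoteInterface):
    # A plain str literal is already the "native string" Foolscap expects.
    __remote_name__ = "RIExample.tahoe.allmydata.com"

    def get_version():
        return Any()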
@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
 
-from past.builtins import long
-
 from six import ensure_text, ensure_str
 
 import time

@@ -304,7 +302,7 @@ class IntroducerClient(service.Service, Referenceable):
         if "seqnum" in old:
             # must beat previous sequence number to replace
             if ("seqnum" not in ann
-                or not isinstance(ann["seqnum"], (int,long))):
+                or not isinstance(ann["seqnum"], int)):
                 self.log("not replacing old announcement, no valid seqnum: %s"
                          % (ann,),
                          parent=lp2, level=log.NOISY, umid="zFGH3Q")
@@ -2,9 +2,6 @@
 Ported to Python 3.
 """
 
-
-from future.utils import native_str
-
 from zope.interface import Interface
 from foolscap.api import StringConstraint, SetOf, DictOf, Any, \
                          RemoteInterface, Referenceable

@@ -34,7 +31,7 @@ FURL = StringConstraint(1000)
 Announcement_v2 = Any()
 
 class RIIntroducerSubscriberClient_v2(RemoteInterface):
-    __remote_name__ = native_str("RIIntroducerSubscriberClient_v2.tahoe.allmydata.com")
+    __remote_name__ = "RIIntroducerSubscriberClient_v2.tahoe.allmydata.com"
 
     def announce_v2(announcements=SetOf(Announcement_v2)):
         """I accept announcements from the publisher."""

@@ -47,11 +44,14 @@ class RIIntroducerPublisherAndSubscriberService_v2(RemoteInterface):
     announcement message. I will deliver a copy to all connected subscribers.
     To hear about services, connect to me and subscribe to a specific
     service_name."""
-    __remote_name__ = native_str("RIIntroducerPublisherAndSubscriberService_v2.tahoe.allmydata.com")
+    __remote_name__ = "RIIntroducerPublisherAndSubscriberService_v2.tahoe.allmydata.com"
+
+    def get_version():
+        return DictOf(bytes, Any())
 
     def publish_v2(announcement=Announcement_v2, canary=Referenceable):
         return None
 
     def subscribe_v2(subscriber=RIIntroducerSubscriberClient_v2,
                      service_name=bytes, subscriber_info=SubscriberInfo):
         """Give me a subscriber reference, and I will call its announce_v2()
@@ -4,7 +4,6 @@ Ported to Python 3.
 
 from __future__ import annotations
 
-from past.builtins import long
 from six import ensure_text
 
 import time, os.path, textwrap

@@ -262,7 +261,7 @@ class IntroducerService(service.MultiService, Referenceable): # type: ignore[mi
         if "seqnum" in old_ann:
             # must beat previous sequence number to replace
             if ("seqnum" not in ann
-                or not isinstance(ann["seqnum"], (int,long))):
+                or not isinstance(ann["seqnum"], int)):
                 self.log("not replacing old ann, no valid seqnum",
                          level=log.NOISY, umid="ySbaVw")
                 self._debug_counts["inbound_no_seqnum"] += 1
@@ -112,8 +112,8 @@ def formatTimeTahoeStyle(self, when):
     """
     d = datetime.datetime.utcfromtimestamp(when)
     if d.microsecond:
-        return d.isoformat(ensure_str(" "))[:-3]+"Z"
-    return d.isoformat(ensure_str(" ")) + ".000Z"
+        return d.isoformat(" ")[:-3]+"Z"
+    return d.isoformat(" ") + ".000Z"
 
 PRIV_README = """
 This directory contains files which contain private data for the Tahoe node,
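`datetime.isoformat` takes a plain one-character `str` separator on Python 3, which is why the `ensure_str(" ")` wrappers can go; a standalone sketch of the same formatting (hypothetical function name):

import datetime

def format_time_tahoe_style(when):
    # "YYYY-MM-DD HH:MM:SS.mmmZ", trimming microseconds to milliseconds.
    d = datetime.datetime.utcfromtimestamp(when)
    if d.microsecond:
        return d.isoformat(" ")[:-3] + "Z"
    return d.isoformat(" ") + ".000Z"

print(format_time_tahoe_style(0))      # 1970-01-01 00:00:00.000Z
print(format_time_tahoe_style(0.25))   # 1970-01-01 00:00:00.250Z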
@@ -3,7 +3,6 @@ Ported to Python 3.
 """
 
 import sys
-import six
 from allmydata.util.assertutil import precondition
 from allmydata.util.fileutil import abspath_expanduser_unicode
 

@@ -13,10 +12,10 @@ if sys.platform == 'win32':
     from allmydata.windows import registry
     path = registry.get_base_dir_path()
     if path:
-        precondition(isinstance(path, six.text_type), path)
+        precondition(isinstance(path, str), path)
         _default_nodedir = abspath_expanduser_unicode(path)
 
 if _default_nodedir is None:
-    path = abspath_expanduser_unicode(u"~/.tahoe")
-    precondition(isinstance(path, six.text_type), path)
+    path = abspath_expanduser_unicode("~/.tahoe")
+    precondition(isinstance(path, str), path)
     _default_nodedir = path
@@ -65,8 +65,8 @@ class Options(usage.Options):
     ]
     optParameters = [
         ["node-directory", "d", None, NODEDIR_HELP],
-        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type],
-        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type],
+        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", str],
+        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", str],
    ]
 
    def opt_version(self):

@@ -262,7 +262,7 @@ def _setup_coverage(reactor, argv):
     # can we put this _setup_coverage call after we hit
     # argument-parsing?
     # ensure_str() only necessary on Python 2.
-    if six.ensure_str('--coverage') not in sys.argv:
+    if '--coverage' not in sys.argv:
         return
     argv.remove('--coverage')
 
@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
 
-from future.utils import PY3
-
 from six import ensure_str
 
 import os, time

@@ -81,9 +79,7 @@ class SlowOperationRunner(object):
         if not data["finished"]:
             return False
         if self.options.get("raw"):
-            if PY3:
-                # need to write bytes!
-                stdout = stdout.buffer
+            stdout = stdout.buffer
             if is_printable_ascii(jdata):
                 stdout.write(jdata)
                 stdout.write(b"\n")
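The `if PY3:` guard disappears because a text-mode stdout always exposes its underlying binary stream as `.buffer` on Python 3; a small sketch of writing raw bytes that way (assumes stdout has not been replaced by a buffer-less stream):

import sys

def write_raw(jdata):
    out = sys.stdout.buffer       # binary view of the text-mode stdout
    out.write(jdata)
    out.write(b"\n")
    out.flush()

write_raw(b'{"finished": true}')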
@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
 
-from six import ensure_str, ensure_text
+from six import ensure_text
 
 from urllib.parse import quote as url_quote
 import json

@@ -168,7 +168,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
             # LIT files and directories do not have a "summary" field.
             summary = cr.get("summary", "Healthy (LIT)")
             # When Python 2 is dropped the ensure_text()/ensure_str() will be unnecessary.
-            print(ensure_text(ensure_str("%s: %s") % (quote_path(path), quote_output(summary, quotemarks=False)),
+            print(ensure_text("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)),
                   encoding=get_io_encoding()), file=stdout)
 
         # always print out corrupt shares

@@ -246,13 +246,11 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
         if not path:
             path = ["<root>"]
         # we don't seem to have a summary available, so build one
-        # When Python 2 is dropped the ensure_text/ensure_str crap can be
-        # dropped.
         if was_healthy:
-            summary = ensure_str("healthy")
+            summary = "healthy"
         else:
-            summary = ensure_str("not healthy")
-        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), summary),
+            summary = "not healthy"
+        print(ensure_text("%s: %s" % (quote_path(path), summary),
               encoding=get_io_encoding()), file=stdout)
 
         # always print out corrupt shares
@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
 
-from future.utils import PY3
-
 from urllib.parse import quote as url_quote
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError

@@ -36,7 +34,7 @@ def get(options):
             outf = stdout
             # Make sure we can write bytes; on Python 3 stdout is Unicode by
             # default.
-            if PY3 and getattr(outf, "encoding", None) is not None:
+            if getattr(outf, "encoding", None) is not None:
                 outf = outf.buffer
             while True:
                 data = resp.read(4096)
@@ -2,10 +2,6 @@
 Ported to Python 3.
 """
 
-from future.utils import PY3
-
-from six import ensure_str
-
 from urllib.parse import quote as url_quote
 import json
 from twisted.protocols.basic import LineOnlyReceiver

@@ -56,8 +52,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
         # use Twisted to split this into lines
         self.in_error = False
         # Writing bytes, so need binary stdout.
-        if PY3:
-            stdout = stdout.buffer
+        stdout = stdout.buffer
         while True:
             chunk = resp.read(100)
             if not chunk:

@@ -99,8 +94,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
                 if vc:
                     print(quote_output(vc, quotemarks=False), file=stdout)
                 else:
-                    # ensure_str() only necessary for Python 2.
-                    print(ensure_str("%s %s") % (
+                    print("%s %s" % (
                         quote_output(d["cap"], quotemarks=False),
                         quote_path(d["path"], quotemarks=False)), file=stdout)
 
@@ -24,13 +24,12 @@ def print(*args, **kwargs):
     encoding error handler and then write the result whereas builtin print
     uses the "strict" encoding error handler.
     """
-    from past.builtins import unicode
     out = kwargs.pop("file", None)
     if out is None:
         out = _sys_stdout
     encoding = out.encoding or "ascii"
     def ensafe(o):
-        if isinstance(o, unicode):
+        if isinstance(o, str):
             return o.encode(encoding, errors="replace").decode(encoding)
         return o
     return _print(
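The `ensafe` helper only needs to special-case `str` now; it round-trips text through the target encoding with the "replace" handler so un-encodable characters degrade instead of raising. A standalone sketch:

def ensafe(o, encoding="ascii"):
    # Encode with errors="replace", then decode, so the result always fits
    # the target encoding.
    if isinstance(o, str):
        return o.encode(encoding, errors="replace").decode(encoding)
    return o

print(ensafe(u"lumi\u00E8re"))   # lumi?re when the target encoding is ascii
print(ensafe(42))                # non-strings pass through unchanged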
@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
 
-from future.utils import PY3
-
 import os.path
 from allmydata.util import base32
 

@@ -43,7 +41,5 @@ def storage_index_to_dir(storageindex):
     Returns native string.
     """
     sia = si_b2a(storageindex)
-    if PY3:
-        # On Python 3 we expect paths to be unicode.
-        sia = sia.decode("ascii")
+    sia = sia.decode("ascii")
    return os.path.join(sia[:2], sia)
@@ -4,9 +4,6 @@ Crawl the storage server shares.
 Ported to Python 3.
 """
 
-
-from future.utils import PY2, PY3
-
 import os
 import time
 import json

@@ -150,10 +147,7 @@ def _dump_json_to_file(js, afile):
     """
     with afile.open("wb") as f:
         data = json.dumps(js)
-        if PY2:
-            f.write(data)
-        else:
-            f.write(data.encode("utf8"))
+        f.write(data.encode("utf8"))
 
 
 class _LeaseStateSerializer(object):

@@ -249,9 +243,7 @@ class ShareCrawler(service.MultiService):
         self._state_serializer = _LeaseStateSerializer(statefile)
         self.prefixes = [si_b2a(struct.pack(">H", i << (16-10)))[:2]
                          for i in range(2**10)]
-        if PY3:
-            # On Python 3 we expect the paths to be unicode, not bytes.
-            self.prefixes = [p.decode("ascii") for p in self.prefixes]
+        self.prefixes = [p.decode("ascii") for p in self.prefixes]
         self.prefixes.sort()
         self.timer = None
         self.bucket_cache = (None, [])
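The crawler's bucket prefixes come from the top 10 bits of a packed big-endian short, and `si_b2a` returns bytes, so the decode to ASCII text is now unconditional. A standalone sketch using the standard-library base32 (a stand-in for the Tahoe `si_b2a`, which may use a different alphabet):

import struct
from base64 import b32encode

def prefix_for(i):
    # Pack the bucket index into the top 10 bits of a big-endian short and
    # keep the first two base32 characters, as text rather than bytes.
    packed = struct.pack(">H", i << (16 - 10))
    return b32encode(packed).lower()[:2].decode("ascii")

prefixes = sorted(prefix_for(i) for i in range(2 ** 10))
assert len(set(prefixes)) == 1024     # every top-10-bit value gets its own prefix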
@@ -2,10 +2,8 @@
 Ported to Python 3.
 """
 
-from future.utils import PY2
-
 import os.path
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from datetime import timedelta
 import re
 

@@ -421,10 +419,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
             else:
                 return original_open(name, *args, **kwargs)
 
-        if PY2:
-            from allmydata.scripts import cli as module_to_patch
-        else:
-            import builtins as module_to_patch
+        import builtins as module_to_patch
         patcher = MonkeyPatcher((module_to_patch, 'open', call_file))
         patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to'])
         self.failUnless(ns.called)
@@ -4,7 +4,7 @@ Ported to Python 3.
 
 import sys
 import os.path, time
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.trial import unittest
 
 from allmydata.util import fileutil

@@ -3,7 +3,7 @@ from six import ensure_text
 import os.path
 import json
 from twisted.trial import unittest
-from six.moves import cStringIO as StringIO
+from io import StringIO
 
 from allmydata import uri
 from allmydata.util import base32
@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
 
-from six.moves import cStringIO as StringIO
+from io import StringIO
 import re
 from six import ensure_text
 

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
 
-from six.moves import StringIO
+from io import StringIO
 import os.path
 from twisted.trial import unittest
 from urllib.parse import quote as url_quote
@@ -2,9 +2,6 @@
 Ported to Python 3.
 """
 
-from future.utils import PY3
-from six import ensure_str
-
 from twisted.trial import unittest
 from twisted.internet import defer
 

@@ -12,7 +9,7 @@ from allmydata.immutable import upload
 from allmydata.interfaces import MDMF_VERSION, SDMF_VERSION
 from allmydata.mutable.publish import MutableData
 from ..no_network import GridTestMixin
-from allmydata.util.encodingutil import quote_output, get_io_encoding
+from allmydata.util.encodingutil import quote_output
 from .common import CLITestMixin
 
 

@@ -26,10 +23,6 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         good_arg = u"g\u00F6\u00F6d"
         good_out = u"g\u00F6\u00F6d"
 
-        # On Python 2 we get bytes, so we need encoded version. On Python 3
-        # stdio is unicode so can leave unchanged.
-        good_out_encoded = good_out if PY3 else good_out.encode(get_io_encoding())
-
         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n

@@ -52,7 +45,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 0)
             self.assertEqual(len(err), 0, err)
-            expected = sorted([ensure_str("0share"), ensure_str("1share"), good_out_encoded])
+            expected = sorted(["0share", "1share", good_out])
             self.assertEqual(sorted(out.splitlines()), expected)
         d.addCallback(_check1)
         d.addCallback(lambda ign: self.do_cli("ls", "missing"))

@@ -85,8 +78,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             # listing a file (as dir/filename) should have the edge metadata,
             # including the filename
             self.failUnlessReallyEqual(rc, 0)
-            self.failUnlessIn(good_out_encoded, out)
-            self.failIfIn(ensure_str("-r-- %d -" % len(small)), out,
+            self.failUnlessIn(good_out, out)
+            self.failIfIn("-r-- %d -" % len(small), out,
                           "trailing hyphen means unknown date")
 
         if good_arg is not None:
@@ -5,7 +5,7 @@ Tests for ``allmydata.scripts.tahoe_run``.
 from __future__ import annotations
 
 import re
-from six.moves import (
+from io import (
     StringIO,
 )
 
@@ -2,9 +2,6 @@
 Ported to Python 3.
 """
 
-from future.utils import PY2, PY3, bchr, binary_type
-from future.builtins import str as future_str
-
 import os
 import sys
 import time

@@ -13,8 +10,6 @@ from functools import (
     partial,
 )
 from random import randrange
-if PY2:
-    from StringIO import StringIO
 from io import (
     TextIOWrapper,
     BytesIO,

@@ -28,6 +23,9 @@ from ..util.assertutil import precondition
 from ..scripts import runner
 from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding, argv_type, unicode_to_argv
 
+def bchr(s):
+    return bytes([s])
+
 
 def skip_if_cannot_represent_filename(u):
     precondition(isinstance(u, str))
@@ -66,13 +64,13 @@ def run_cli_native(verb, *args, **kwargs):
     :param runner.Options options: The options instance to use to parse the
         given arguments.
 
-    :param native_str verb: The command to run. For example,
+    :param str verb: The command to run. For example,
         ``"create-node"``.
 
-    :param [native_str] args: The arguments to pass to the command. For
+    :param [str] args: The arguments to pass to the command. For
         example, ``("--hostname=localhost",)``.
 
-    :param [native_str] nodeargs: Extra arguments to pass to the Tahoe
+    :param [str] nodeargs: Extra arguments to pass to the Tahoe
         executable before ``verb``.
 
     :param bytes|unicode stdin: Text or bytes to pass to the command via stdin.
@@ -101,22 +99,7 @@ def run_cli_native(verb, *args, **kwargs):
     )
     argv = ["tahoe"] + nodeargs + [verb] + list(args)
     stdin = kwargs.get("stdin", "")
-    if PY2:
-        # The original behavior, the Python 2 behavior, is to accept either
-        # bytes or unicode and try to automatically encode or decode as
-        # necessary. This works okay for ASCII and if LANG is set
-        # appropriately. These aren't great constraints so we should move
-        # away from this behavior.
-        #
-        # The encoding attribute doesn't change StringIO behavior on Python 2,
-        # but it's there for realism of the emulation.
-        stdin = StringIO(stdin)
-        stdin.encoding = encoding
-        stdout = StringIO()
-        stdout.encoding = encoding
-        stderr = StringIO()
-        stderr.encoding = encoding
-    else:
+    if True:
         # The new behavior, the Python 3 behavior, is to accept unicode and
         # encode it using a specific encoding. For older versions of Python 3,
         # the encoding is determined from LANG (bad) but for newer Python 3,
@@ -146,13 +129,13 @@
         stderr=stderr,
     )
     def _done(rc, stdout=stdout, stderr=stderr):
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return 0, _getvalue(stdout), _getvalue(stderr)
     def _err(f, stdout=stdout, stderr=stderr):
         f.trap(SystemExit)
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return f.value.code, _getvalue(stdout), _getvalue(stderr)
@@ -182,18 +165,14 @@ def run_cli_unicode(verb, argv, nodeargs=None, stdin=None, encoding=None):
     if nodeargs is None:
         nodeargs = []
     precondition(
-        all(isinstance(arg, future_str) for arg in [verb] + nodeargs + argv),
+        all(isinstance(arg, str) for arg in [verb] + nodeargs + argv),
         "arguments to run_cli_unicode must be unicode",
         verb=verb,
         nodeargs=nodeargs,
         argv=argv,
     )
     codec = encoding or "ascii"
-    if PY2:
-        encode = lambda t: None if t is None else t.encode(codec)
-    else:
-        # On Python 3 command-line parsing expects Unicode!
-        encode = lambda t: t
+    encode = lambda t: t
     d = run_cli_native(
         encode(verb),
         nodeargs=list(encode(arg) for arg in nodeargs),
@@ -238,7 +217,7 @@
 def flip_one_bit(s, offset=0, size=None):
     """ flip one random bit of the string s, in a byte greater than or equal to offset and less
     than offset+size. """
-    precondition(isinstance(s, binary_type))
+    precondition(isinstance(s, bytes))
     if size is None:
         size=len(s)-offset
     i = randrange(offset, offset+size)
@@ -250,13 +229,9 @@
 class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
-        # Make sure unicode strings are a consistent type. Specifically there's
-        # Future newstr (backported Unicode type) vs. Python 2 native unicode
-        # type. They're equal, and _logically_ the same type, but have
-        # different types in practice.
-        if a.__class__ == future_str:
+        if a.__class__ == str:
             a = str(a)
-        if b.__class__ == future_str:
+        if b.__class__ == str:
             b = str(b)
         self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))
 
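With `future.utils.bchr` gone, indexing a bytes object already yields an int on Python 3, so the local `bchr` above is just `bytes([s])`; a deterministic sketch of the bit-flipping idea it supports (fixed bit instead of the random one used in the real helper):

def bchr(s):
    return bytes([s])

def flip_lowest_bit(s, i):
    # s[i] is an int on Python 3; rebuild the single byte with bchr().
    return s[:i] + bchr(s[i] ^ 0x01) + s[i + 1:]

assert flip_lowest_bit(b'foo', 2) == b'fon'   # 0x6f ^ 0x01 == 0x6e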
@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
 
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.internet import defer, reactor
 from ..common import AsyncBrokenTestCase
 from testtools.matchers import (

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
 
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from ..common import AsyncTestCase
 from testtools.matchers import Equals, HasLength, Contains
 from twisted.internet import defer
@ -4,8 +4,6 @@ Ported to Python 3.
|
||||
|
||||
from future.utils import bchr
|
||||
|
||||
from past.builtins import long
|
||||
|
||||
from io import BytesIO
|
||||
import attr
|
||||
from twisted.internet import defer, reactor
|
||||
@ -129,8 +127,8 @@ class FakeStorageServer(object):
|
||||
continue
|
||||
vector = response[shnum] = []
|
||||
for (offset, length) in readv:
|
||||
assert isinstance(offset, (int, long)), offset
|
||||
assert isinstance(length, (int, long)), length
|
||||
assert isinstance(offset, int), offset
|
||||
assert isinstance(length, int), length
|
||||
vector.append(shares[shnum][offset:offset+length])
|
||||
return response
|
||||
d.addCallback(_read)
|
||||
|
@ -5,7 +5,6 @@ functionality.
|
||||
Ported to Python 3.
|
||||
"""
|
||||
|
||||
from future.utils import native_str, native_str_to_bytes
|
||||
from six import ensure_str
|
||||
|
||||
import attr
|
||||
@ -40,7 +39,7 @@ from allmydata.util.jsonbytes import (
|
||||
|
||||
|
||||
class RIDummy(RemoteInterface):
|
||||
__remote_name__ = native_str("RIDummy.tahoe.allmydata.com")
|
||||
__remote_name__ = "RIDummy.tahoe.allmydata.com"
|
||||
|
||||
def just_some_method():
|
||||
"""
|
||||
@ -87,7 +86,7 @@ class DummyStorage(object):
|
||||
"""
|
||||
items = configuration.items(self._client_section_name, [])
|
||||
resource = Data(
|
||||
native_str_to_bytes(dumps(dict(items))),
|
||||
dumps(dict(items)).encode("utf-8"),
|
||||
ensure_str("text/json"),
|
||||
)
|
||||
# Give it some dynamic stuff too.
|
||||
@ -105,7 +104,7 @@ class GetCounter(Resource, object):
|
||||
value = 0
|
||||
def render_GET(self, request):
|
||||
self.value += 1
|
||||
return native_str_to_bytes(dumps({"value": self.value}))
|
||||
return dumps({"value": self.value}).encode("utf-8")
|
||||
|
||||
|
||||
@implementer(RIDummy)
|
||||
|
@ -2,8 +2,6 @@
|
||||
This module has been ported to Python 3.
|
||||
"""
|
||||
|
||||
from future.utils import PY2
|
||||
|
||||
import sys
|
||||
import random
|
||||
|
||||
@ -31,7 +29,7 @@ class TestFlipOneBit(SyncTestCase):
|
||||
|
||||
def test_accepts_byte_string(self):
|
||||
actual = flip_one_bit(b'foo')
|
||||
self.assertEqual(actual, b'fno' if PY2 else b'fom')
|
||||
self.assertEqual(actual, b'fom')
|
||||
|
||||
def test_rejects_unicode_string(self):
|
||||
self.assertRaises(AssertionError, flip_one_bit, u'foo')
|
||||
|
@ -5,8 +5,6 @@ Ported to Python 3.
|
||||
"""
|
||||
|
||||
|
||||
from future.utils import PY3
|
||||
|
||||
import time
|
||||
import os.path
|
||||
from twisted.trial import unittest
|
||||
@ -28,10 +26,9 @@ class BucketEnumeratingCrawler(ShareCrawler):
|
||||
self.all_buckets = []
|
||||
self.finished_d = defer.Deferred()
|
||||
def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
|
||||
if PY3:
|
||||
# Bucket _inputs_ are bytes, and that's what we will compare this
|
||||
# to:
|
||||
storage_index_b32 = storage_index_b32.encode("ascii")
|
||||
# Bucket _inputs_ are bytes, and that's what we will compare this
|
||||
# to:
|
||||
storage_index_b32 = storage_index_b32.encode("ascii")
|
||||
self.all_buckets.append(storage_index_b32)
|
||||
def finished_cycle(self, cycle):
|
||||
eventually(self.finished_d.callback, None)
|
||||
@ -46,10 +43,9 @@ class PacedCrawler(ShareCrawler):
|
||||
self.finished_d = defer.Deferred()
|
||||
self.yield_cb = None
|
||||
def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
|
||||
if PY3:
|
||||
# Bucket _inputs_ are bytes, and that's what we will compare this
|
||||
# to:
|
||||
storage_index_b32 = storage_index_b32.encode("ascii")
|
||||
# Bucket _inputs_ are bytes, and that's what we will compare this
|
||||
# to:
|
||||
storage_index_b32 = storage_index_b32.encode("ascii")
|
||||
self.all_buckets.append(storage_index_b32)
|
||||
self.countdown -= 1
|
||||
if self.countdown == 0:
|
||||
|
@ -1,6 +1,3 @@
|
||||
|
||||
from future.utils import native_bytes
|
||||
|
||||
import unittest
|
||||
|
||||
from base64 import b64decode
|
||||
@ -40,7 +37,7 @@ class TestRegression(unittest.TestCase):
|
||||
# priv_str = b64encode(priv.serialize())
|
||||
# pub_str = b64encode(priv.get_verifying_key().serialize())
|
||||
RSA_2048_PRIV_KEY = b64decode(f.read().strip())
|
||||
assert isinstance(RSA_2048_PRIV_KEY, native_bytes)
|
||||
assert isinstance(RSA_2048_PRIV_KEY, bytes)
|
||||
|
||||
with RESOURCE_DIR.child('pycryptopp-rsa-2048-sig.txt').open('r') as f:
|
||||
# Signature created using `RSA_2048_PRIV_KEY` via:
|
||||
@ -61,7 +58,7 @@ class TestRegression(unittest.TestCase):
|
||||
# priv_str = b64encode(priv.serialize())
|
||||
# pub_str = b64encode(priv.get_verifying_key().serialize())
|
||||
RSA_TINY_PRIV_KEY = b64decode(f.read().strip())
|
||||
assert isinstance(RSA_TINY_PRIV_KEY, native_bytes)
|
||||
assert isinstance(RSA_TINY_PRIV_KEY, bytes)
|
||||
|
||||
with RESOURCE_DIR.child('pycryptopp-rsa-32768-priv.txt').open('r') as f:
|
||||
# Created using `pycryptopp`:
|
||||
@ -72,7 +69,7 @@ class TestRegression(unittest.TestCase):
|
||||
# priv_str = b64encode(priv.serialize())
|
||||
# pub_str = b64encode(priv.get_verifying_key().serialize())
|
||||
RSA_HUGE_PRIV_KEY = b64decode(f.read().strip())
|
||||
assert isinstance(RSA_HUGE_PRIV_KEY, native_bytes)
|
||||
assert isinstance(RSA_HUGE_PRIV_KEY, bytes)
|
||||
|
||||
def test_old_start_up_test(self):
|
||||
"""
|
||||
@ -324,7 +321,7 @@ class TestEd25519(unittest.TestCase):
|
||||
private_key, public_key = ed25519.create_signing_keypair()
|
||||
private_key_str = ed25519.string_from_signing_key(private_key)
|
||||
|
||||
self.assertIsInstance(private_key_str, native_bytes)
|
||||
self.assertIsInstance(private_key_str, bytes)
|
||||
|
||||
private_key2, public_key2 = ed25519.signing_keypair_from_string(private_key_str)
|
||||
|
||||
@ -340,7 +337,7 @@ class TestEd25519(unittest.TestCase):
|
||||
|
||||
# ditto, but for the verifying keys
|
||||
public_key_str = ed25519.string_from_verifying_key(public_key)
|
||||
self.assertIsInstance(public_key_str, native_bytes)
|
||||
self.assertIsInstance(public_key_str, bytes)
|
||||
|
||||
public_key2 = ed25519.verifying_key_from_string(public_key_str)
|
||||
self.assertEqual(
|
||||
@ -444,7 +441,7 @@ class TestRsa(unittest.TestCase):
|
||||
priv_key, pub_key = rsa.create_signing_keypair(2048)
|
||||
priv_key_str = rsa.der_string_from_signing_key(priv_key)
|
||||
|
||||
self.assertIsInstance(priv_key_str, native_bytes)
|
||||
self.assertIsInstance(priv_key_str, bytes)
|
||||
|
||||
priv_key2, pub_key2 = rsa.create_signing_keypair_from_string(priv_key_str)
|
||||
|
||||
|
@ -3,8 +3,6 @@
|
||||
Ported to Python 3.
|
||||
"""
|
||||
|
||||
from past.builtins import long
|
||||
|
||||
import time
|
||||
import unicodedata
|
||||
from zope.interface import implementer
|
||||
@ -1854,7 +1852,7 @@ class DeepStats(testutil.ReallyEqualMixin, unittest.TestCase):
|
||||
(101, 316, 216),
|
||||
(317, 1000, 684),
|
||||
(1001, 3162, 99),
|
||||
(long(3162277660169), long(10000000000000), 1),
|
||||
(3162277660169, 10000000000000, 1),
|
||||
])
|
||||
|
||||
class UCWEingMutableFileNode(MutableFileNode):
|
||||
|
@ -10,7 +10,6 @@ from future.utils import bchr
|
||||
|
||||
from typing import Any
|
||||
|
||||
import six
|
||||
import os
|
||||
from twisted.trial import unittest
|
||||
from twisted.internet import defer, reactor
|
||||
@ -30,9 +29,6 @@ from allmydata.immutable.downloader.fetcher import SegmentFetcher
|
||||
from allmydata.codec import CRSDecoder
|
||||
from foolscap.eventual import eventually, fireEventually, flushEventualQueue
|
||||
|
||||
if six.PY3:
|
||||
long = int
|
||||
|
||||
plaintext = b"This is a moderate-sized file.\n" * 10
|
||||
mutable_plaintext = b"This is a moderate-sized mutable file.\n" * 10
|
||||
|
||||
|
@ -1,8 +1,4 @@
|
||||
|
||||
from future.utils import PY2, PY3
|
||||
|
||||
from past.builtins import unicode
|
||||
|
||||
lumiere_nfc = u"lumi\u00E8re"
|
||||
Artonwall_nfc = u"\u00C4rtonwall.mp3"
|
||||
Artonwall_nfd = u"A\u0308rtonwall.mp3"
|
||||
@ -46,13 +42,7 @@ if __name__ == "__main__":
|
||||
for fname in TEST_FILENAMES:
|
||||
open(os.path.join(tmpdir, fname), 'w').close()
|
||||
|
||||
# On Python 2, listing directories returns unicode under Windows or
|
||||
# MacOS X if the input is unicode. On Python 3, it always returns
|
||||
# Unicode.
|
||||
if PY2 and sys.platform in ('win32', 'darwin'):
|
||||
dirlist = os.listdir(unicode(tmpdir))
|
||||
else:
|
||||
dirlist = os.listdir(tmpdir)
|
||||
dirlist = os.listdir(tmpdir)
|
||||
|
||||
print(" dirlist = %s" % repr(dirlist))
|
||||
except:
|
||||
@ -64,7 +54,6 @@ if __name__ == "__main__":
|
||||
|
||||
|
||||
import os, sys
|
||||
from unittest import skipIf
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
@ -76,7 +65,7 @@ from allmydata.test.common_util import (
|
||||
from allmydata.util import encodingutil, fileutil
|
||||
from allmydata.util.encodingutil import unicode_to_url, \
|
||||
unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
|
||||
quote_filepath, unicode_platform, listdir_unicode, FilenameEncodingError, \
|
||||
quote_filepath, unicode_platform, listdir_unicode, \
|
||||
get_filesystem_encoding, to_bytes, from_utf8_or_none, _reload, \
|
||||
to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \
|
||||
unicode_to_argv
|
||||
@ -84,47 +73,6 @@ from allmydata.util.encodingutil import unicode_to_url, \
|
||||
class MockStdout(object):
|
||||
pass
|
||||
|
||||
# The following tests apply only to platforms that don't store filenames as
|
||||
# Unicode entities on the filesystem.
|
||||
class EncodingUtilNonUnicodePlatform(unittest.TestCase):
|
||||
@skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
|
||||
def setUp(self):
|
||||
# Make sure everything goes back to the way it was at the end of the
|
||||
# test.
|
||||
self.addCleanup(_reload)
|
||||
|
||||
# Mock sys.platform because unicode_platform() uses it. Cleanups run
|
||||
# in reverse order so we do this second so it gets undone first.
|
||||
self.patch(sys, "platform", "linux")
|
||||
|
||||
def test_listdir_unicode(self):
|
||||
# What happens if latin1-encoded filenames are encountered on an UTF-8
|
||||
# filesystem?
|
||||
def call_os_listdir(path):
|
||||
return [
|
||||
lumiere_nfc.encode('utf-8'),
|
||||
lumiere_nfc.encode('latin1')
|
||||
]
|
||||
self.patch(os, 'listdir', call_os_listdir)
|
||||
|
||||
sys_filesystemencoding = 'utf-8'
|
||||
def call_sys_getfilesystemencoding():
|
||||
return sys_filesystemencoding
|
||||
self.patch(sys, 'getfilesystemencoding', call_sys_getfilesystemencoding)
|
||||
|
||||
_reload()
|
||||
self.failUnlessRaises(FilenameEncodingError,
|
||||
listdir_unicode,
|
||||
u'/dummy')
|
||||
|
||||
# We're trying to list a directory whose name cannot be represented in
|
||||
# the filesystem encoding. This should fail.
|
||||
sys_filesystemencoding = 'ascii'
|
||||
_reload()
|
||||
self.failUnlessRaises(FilenameEncodingError,
|
||||
listdir_unicode,
|
||||
u'/' + lumiere_nfc)
|
||||
|
||||
|
||||
class EncodingUtil(ReallyEqualMixin):
|
||||
def setUp(self):
|
||||
@ -143,10 +91,7 @@ class EncodingUtil(ReallyEqualMixin):
|
||||
converts to bytes using UTF-8 elsewhere.
|
||||
"""
|
||||
result = unicode_to_argv(lumiere_nfc)
|
||||
if PY3 or self.platform == "win32":
|
||||
expected_value = lumiere_nfc
|
||||
else:
|
||||
expected_value = lumiere_nfc.encode(self.io_encoding)
|
||||
expected_value = lumiere_nfc
|
||||
|
||||
self.assertIsInstance(result, type(expected_value))
|
||||
self.assertEqual(result, expected_value)
|
||||
@ -167,13 +112,10 @@ class EncodingUtil(ReallyEqualMixin):
|
||||
% (self.filesystem_encoding,))
|
||||
|
||||
def call_os_listdir(path):
|
||||
if PY2:
|
||||
return self.dirlist
|
||||
else:
|
||||
# Python 3 always lists unicode filenames:
|
||||
return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
|
||||
else d
|
||||
for d in self.dirlist]
|
||||
# Python 3 always lists unicode filenames:
|
||||
return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
|
||||
else d
|
||||
for d in self.dirlist]
|
||||
|
||||
self.patch(os, 'listdir', call_os_listdir)
|
||||
|
||||
@ -204,10 +146,7 @@ class StdlibUnicode(unittest.TestCase):
|
||||
fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
|
||||
open(fn, 'wb').close()
|
||||
self.failUnless(os.path.exists(fn))
|
||||
if PY2:
|
||||
getcwdu = os.getcwdu
|
||||
else:
|
||||
getcwdu = os.getcwd
|
||||
getcwdu = os.getcwd
|
||||
self.failUnless(os.path.exists(os.path.join(getcwdu(), fn)))
|
||||
filenames = listdir_unicode(lumiere_nfc)
|
||||
|
||||
@ -237,7 +176,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
|
||||
_reload()
|
||||
|
||||
def _check(self, inp, out, enc, optional_quotes, quote_newlines):
|
||||
if PY3 and isinstance(out, bytes):
|
||||
if isinstance(out, bytes):
|
||||
out = out.decode(enc or encodingutil.io_encoding)
|
||||
out2 = out
|
||||
if optional_quotes:
|
||||
@ -266,9 +205,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
|
||||
|
||||
def _test_quote_output_all(self, enc):
|
||||
def check(inp, out, optional_quotes=False, quote_newlines=None):
|
||||
if PY3:
|
||||
# Result is always Unicode on Python 3
|
||||
out = out.decode("ascii")
|
||||
out = out.decode("ascii")
|
||||
self._check(inp, out, enc, optional_quotes, quote_newlines)
|
||||
|
||||
# optional single quotes
|
||||
@ -354,9 +291,7 @@ def win32_other(win32, other):
|
||||
class QuotePaths(ReallyEqualMixin, unittest.TestCase):
|
||||
|
||||
def assertPathsEqual(self, actual, expected):
|
||||
if PY3:
|
||||
# On Python 3, results should be unicode:
|
||||
expected = expected.decode("ascii")
|
||||
expected = expected.decode("ascii")
|
||||
self.failUnlessReallyEqual(actual, expected)
|
||||
|
||||
def test_quote_path(self):
|
||||
|
@ -4,8 +4,6 @@ Tests for allmydata.util.humanreadable.
|
||||
This module has been ported to Python 3.
|
||||
"""
|
||||
|
||||
from past.builtins import long
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from allmydata.util import humanreadable
|
||||
@ -26,7 +24,7 @@ class HumanReadable(unittest.TestCase):
|
||||
self.assertRegex(hr(foo), r"<foo\(\) at test_humanreadable.py:\d+>")
|
||||
self.failUnlessEqual(hr(self.test_repr),
|
||||
"<bound method HumanReadable.test_repr of <allmydata.test.test_humanreadable.HumanReadable testMethod=test_repr>>")
|
||||
self.failUnlessEqual(hr(long(1)), "1")
|
||||
self.failUnlessEqual(hr(1), "1")
|
||||
self.assertIn(hr(10**40),
|
||||
["100000000000000000...000000000000000000",
|
||||
"100000000000000000...0000000000000000000"])
|
||||
|
@ -6,8 +6,8 @@ import os
|
||||
from twisted.trial import unittest
|
||||
from twisted.internet import defer, error
|
||||
from twisted.python.usage import UsageError
|
||||
from six.moves import StringIO
|
||||
import mock
|
||||
from io import StringIO
|
||||
from unittest import mock
|
||||
from ..util import i2p_provider
|
||||
from ..scripts import create_node, runner
|
||||
|
||||
|
@ -2,10 +2,6 @@
|
||||
Ported to Python 3
|
||||
"""
|
||||
|
||||
from future.utils import PY2
|
||||
|
||||
from six import ensure_text
|
||||
|
||||
import os.path, re, sys
|
||||
from os import linesep
|
||||
import locale
|
||||
@ -129,18 +125,14 @@ def run_bintahoe(extra_argv, python_options=None):
|
||||
:return: A three-tuple of stdout (unicode), stderr (unicode), and the
|
||||
child process "returncode" (int).
|
||||
"""
|
||||
executable = ensure_text(sys.executable)
|
||||
argv = [executable]
|
||||
argv = [sys.executable]
|
||||
if python_options is not None:
|
||||
argv.extend(python_options)
|
||||
argv.extend([u"-b", u"-m", u"allmydata.scripts.runner"])
|
||||
argv.extend(extra_argv)
|
||||
argv = list(unicode_to_argv(arg) for arg in argv)
|
||||
p = Popen(argv, stdout=PIPE, stderr=PIPE)
|
||||
if PY2:
|
||||
encoding = "utf-8"
|
||||
else:
|
||||
encoding = locale.getpreferredencoding(False)
|
||||
encoding = locale.getpreferredencoding(False)
|
||||
out = p.stdout.read().decode(encoding)
|
||||
err = p.stderr.read().decode(encoding)
|
||||
returncode = p.wait()
|
||||
@ -154,10 +146,7 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase):
|
||||
"""
|
||||
tricky = u"\u00F6"
|
||||
out, err, returncode = run_bintahoe([tricky])
|
||||
if PY2:
|
||||
expected = u"Unknown command: \\xf6"
|
||||
else:
|
||||
expected = u"Unknown command: \xf6"
|
||||
expected = u"Unknown command: \xf6"
|
||||
self.assertEqual(returncode, 1)
|
||||
self.assertIn(
|
||||
expected,
|
||||
|
@ -2,8 +2,6 @@
|
||||
Tests for allmydata.util.spans.
|
||||
"""
|
||||
|
||||
from past.builtins import long
|
||||
|
||||
import binascii
|
||||
import hashlib
|
||||
|
||||
@ -116,9 +114,6 @@ class ByteSpans(unittest.TestCase):
|
||||
s1 = Spans(3, 4) # 3,4,5,6
|
||||
self._check1(s1)
|
||||
|
||||
s1 = Spans(long(3), long(4)) # 3,4,5,6
|
||||
self._check1(s1)
|
||||
|
||||
s2 = Spans(s1)
|
||||
self._check1(s2)
|
||||
|
||||
@ -446,9 +441,9 @@ class StringSpans(unittest.TestCase):
|
||||
self.failUnlessEqual(ds.get(2, 4), b"fear")
|
||||
|
||||
ds = klass()
|
||||
ds.add(long(2), b"four")
|
||||
ds.add(long(3), b"ea")
|
||||
self.failUnlessEqual(ds.get(long(2), long(4)), b"fear")
|
||||
ds.add(2, b"four")
|
||||
ds.add(3, b"ea")
|
||||
self.failUnlessEqual(ds.get(2, 4), b"fear")
|
||||
|
||||
|
||||
def do_scan(self, klass):
|
||||
|
@ -4,7 +4,7 @@ Tests for allmydata.util.statistics.
|
||||
Ported to Python 3.
|
||||
"""
|
||||
|
||||
from six.moves import StringIO # native string StringIO
|
||||
from io import StringIO
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
@ -9,7 +9,7 @@ import os.path
|
||||
import re
|
||||
import json
|
||||
from unittest import skipIf
|
||||
from six.moves import StringIO
|
||||
from io import StringIO
|
||||
|
||||
from twisted.trial import unittest
|
||||
from twisted.internet import defer
|
||||
|
@ -2,8 +2,6 @@
|
||||
Tests for allmydata.util.time_format.
|
||||
"""
|
||||
|
||||
from past.builtins import long
|
||||
|
||||
import time
|
||||
|
||||
from twisted.trial import unittest
|
||||
@ -103,7 +101,7 @@ class TimeFormat(unittest.TestCase, TimezoneMixin):
|
||||
def test_parse_date(self):
|
||||
p = time_format.parse_date
|
||||
self.failUnlessEqual(p("2010-02-21"), 1266710400)
|
||||
self.failUnless(isinstance(p("2009-03-18"), (int, long)), p("2009-03-18"))
|
||||
self.failUnless(isinstance(p("2009-03-18"), int), p("2009-03-18"))
|
||||
self.failUnlessEqual(p("2009-03-18"), 1237334400)
|
||||
|
||||
def test_format_time(self):
|
||||
|
@ -5,9 +5,8 @@ Ported to Python 3.
|
||||
import os
|
||||
from twisted.trial import unittest
|
||||
from twisted.internet import defer, error
|
||||
from six.moves import StringIO
|
||||
from six import ensure_str
|
||||
import mock
|
||||
from io import StringIO
|
||||
from unittest import mock
|
||||
from ..util import tor_provider
|
||||
from ..scripts import create_node, runner
|
||||
from foolscap.eventual import flushEventualQueue
|
||||
@ -185,7 +184,7 @@ class CreateOnion(unittest.TestCase):
|
||||
txtorcon = mock.Mock()
|
||||
ehs = mock.Mock()
|
||||
# This appears to be a native string in the real txtorcon object...
|
||||
ehs.private_key = ensure_str("privkey")
|
||||
ehs.private_key = "privkey"
|
||||
ehs.hostname = "ONION.onion"
|
||||
txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
|
||||
ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))
|
||||
|
@ -2,7 +2,6 @@
|
||||
Ported to Python3.
|
||||
"""
|
||||
|
||||
import six
|
||||
import os, time, sys
|
||||
import yaml
|
||||
import json
|
||||
@ -22,8 +21,7 @@ from allmydata.util.cputhreadpool import defer_to_thread, disable_thread_pool_fo
|
||||
from allmydata.test.common_util import ReallyEqualMixin
|
||||
from .no_network import fireNow, LocalWrapper
|
||||
|
||||
if six.PY3:
|
||||
long = int
|
||||
long = int
|
||||
|
||||
|
||||
class IDLib(unittest.TestCase):
|
||||
@ -477,7 +475,7 @@ class YAML(unittest.TestCase):
|
||||
Unicode and (ASCII) native strings get roundtripped to Unicode strings.
|
||||
"""
|
||||
data = yaml.safe_dump(
|
||||
[six.ensure_str("str"), u"unicode", u"\u1234nicode"]
|
||||
["str", "unicode", "\u1234nicode"]
|
||||
)
|
||||
back = yamlutil.safe_load(data)
|
||||
self.assertIsInstance(back[0], str)
|
||||
|
@ -5,7 +5,7 @@ Ported to Python 3.
|
||||
import os.path, re
|
||||
from urllib.parse import quote as url_quote
|
||||
import json
|
||||
from six.moves import StringIO
|
||||
from io import StringIO
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
|
@ -7,8 +7,6 @@ Methods ending in to_string() are actually to_bytes(), possibly should be fixed
|
||||
in follow-up port.
|
||||
"""
|
||||
|
||||
from past.builtins import unicode, long
|
||||
|
||||
import re
|
||||
from typing import Type
|
||||
|
||||
@ -91,7 +89,7 @@ class CHKFileURI(_BaseURI):
|
||||
def to_string(self):
|
||||
assert isinstance(self.needed_shares, int)
|
||||
assert isinstance(self.total_shares, int)
|
||||
assert isinstance(self.size, (int,long))
|
||||
assert isinstance(self.size, int)
|
||||
|
||||
return (b'URI:CHK:%s:%s:%d:%d:%d' %
|
||||
(base32.b2a(self.key),
|
||||
@ -147,7 +145,7 @@ class CHKFileVerifierURI(_BaseURI):
|
||||
def to_string(self):
|
||||
assert isinstance(self.needed_shares, int)
|
||||
assert isinstance(self.total_shares, int)
|
||||
assert isinstance(self.size, (int,long))
|
||||
assert isinstance(self.size, int)
|
||||
|
||||
return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' %
|
||||
(si_b2a(self.storage_index),
|
||||
@ -742,7 +740,7 @@ ALLEGED_IMMUTABLE_PREFIX = b'imm.'
|
||||
|
||||
def from_string(u, deep_immutable=False, name=u"<unknown name>"):
|
||||
"""Create URI from either unicode or byte string."""
|
||||
if isinstance(u, unicode):
|
||||
if isinstance(u, str):
|
||||
u = u.encode("utf-8")
|
||||
if not isinstance(u, bytes):
|
||||
raise TypeError("URI must be unicode string or bytes: %r" % (u,))
|
||||
@ -844,7 +842,7 @@ def is_uri(s):
|
||||
return False
|
||||
|
||||
def is_literal_file_uri(s):
|
||||
if isinstance(s, unicode):
|
||||
if isinstance(s, str):
|
||||
s = s.encode("utf-8")
|
||||
if not isinstance(s, bytes):
|
||||
return False
|
||||
@ -853,7 +851,7 @@ def is_literal_file_uri(s):
|
||||
s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:'))
|
||||
|
||||
def has_uri_prefix(s):
|
||||
if isinstance(s, unicode):
|
||||
if isinstance(s, str):
|
||||
s = s.encode("utf-8")
|
||||
if not isinstance(s, bytes):
|
||||
return False
|
||||
@ -895,9 +893,9 @@ def pack_extension(data):
|
||||
pieces = []
|
||||
for k in sorted(data.keys()):
|
||||
value = data[k]
|
||||
if isinstance(value, (int, long)):
|
||||
if isinstance(value, int):
|
||||
value = b"%d" % value
|
||||
if isinstance(k, unicode):
|
||||
if isinstance(k, str):
|
||||
k = k.encode("utf-8")
|
||||
assert isinstance(value, bytes), k
|
||||
assert re.match(br'^[a-zA-Z_\-]+$', k)
|
||||
|
@ -4,15 +4,8 @@ Base62 encoding.
|
||||
Ported to Python 3.
|
||||
"""
|
||||
|
||||
from future.utils import PY2
|
||||
|
||||
if PY2:
|
||||
import string
|
||||
maketrans = string.maketrans
|
||||
translate = string.translate
|
||||
else:
|
||||
maketrans = bytes.maketrans
|
||||
translate = bytes.translate
|
||||
maketrans = bytes.maketrans
|
||||
translate = bytes.translate
|
||||
|
||||
from past.builtins import chr as byteschr
|
||||
|
||||
|
@ -8,9 +8,6 @@ Once Python 2 support is dropped, most of this module will obsolete, since
|
||||
Unicode is the default everywhere in Python 3.
|
||||
"""
|
||||
|
||||
from future.utils import PY3, native_str
|
||||
from future.builtins import str as future_str
|
||||
|
||||
from past.builtins import unicode
|
||||
from six import ensure_str
|
||||
|
||||
@ -56,25 +53,13 @@ def check_encoding(encoding):
|
||||
io_encoding = "utf-8"
|
||||
|
||||
filesystem_encoding = None
|
||||
is_unicode_platform = False
|
||||
use_unicode_filepath = False
|
||||
is_unicode_platform = True
|
||||
use_unicode_filepath = True
|
||||
|
||||
def _reload():
|
||||
global filesystem_encoding, is_unicode_platform, use_unicode_filepath
|
||||
|
||||
global filesystem_encoding
|
||||
filesystem_encoding = canonical_encoding(sys.getfilesystemencoding())
|
||||
check_encoding(filesystem_encoding)
|
||||
is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"]
|
||||
|
||||
# Despite the Unicode-mode FilePath support added to Twisted in
|
||||
# <https://twistedmatrix.com/trac/ticket/7805>, we can't yet use
|
||||
# Unicode-mode FilePaths with INotify on non-Windows platforms due to
|
||||
# <https://twistedmatrix.com/trac/ticket/7928>. Supposedly 7928 is fixed,
|
||||
# though... and Tahoe-LAFS doesn't use inotify anymore!
|
||||
#
|
||||
# In the interest of not breaking anything, this logic is unchanged for
|
||||
# Python 2, but on Python 3 the paths are always unicode, like it or not.
|
||||
use_unicode_filepath = PY3 or sys.platform == "win32"
|
||||
|
||||
_reload()
|
||||
|
||||
@ -128,9 +113,7 @@ def unicode_to_argv(s):
|
||||
Windows, this returns the input unmodified.
|
||||
"""
|
||||
precondition(isinstance(s, unicode), s)
|
||||
if PY3:
|
||||
warnings.warn("This will be unnecessary once Python 2 is dropped.",
|
||||
DeprecationWarning)
|
||||
warnings.warn("This is unnecessary.", DeprecationWarning)
|
||||
if sys.platform == "win32":
|
||||
return s
|
||||
return ensure_str(s)
|
||||
@ -138,7 +121,7 @@ def unicode_to_argv(s):
|
||||
|
||||
# According to unicode_to_argv above, the expected type for
|
||||
# cli args depends on the platform, so capture that expectation.
|
||||
argv_type = (future_str, native_str) if sys.platform == "win32" else native_str
|
||||
argv_type = (str,)
|
||||
"""
|
||||
The expected type for args to a subprocess
|
||||
"""
|
||||
@ -184,24 +167,8 @@ def unicode_to_output(s):
the responsibility of stdout/stderr, they expect Unicode by default.
"""
precondition(isinstance(s, unicode), s)
if PY3:
warnings.warn("This will be unnecessary once Python 2 is dropped.",
DeprecationWarning)
return s

try:
out = s.encode(io_encoding)
except (UnicodeEncodeError, UnicodeDecodeError):
raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
native_str("A string could not be encoded as %s for output to the terminal:\n%r" %
(io_encoding, repr(s))))

if PRINTABLE_8BIT.search(out) is None:
raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
(io_encoding, repr(s))))
return out

warnings.warn("This is unnecessary.", DeprecationWarning)
return s

def _unicode_escape(m, quote_newlines):
u = m.group(0)
@ -303,20 +270,7 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
return b'"%s"' % (escaped.encode(encoding, 'backslashreplace'),)

result = _encode(s)
if PY3:
# On Python 3 half of what this function does is unnecessary, since
# sys.stdout typically expects Unicode. To ensure no encode errors, one
# can do:
#
# sys.stdout.reconfigure(encoding=sys.stdout.encoding, errors="backslashreplace")
#
# Although the problem is that doesn't work in Python 3.6, only 3.7 or
# later... For now not thinking about it, just returning unicode since
# that is the right thing to do on Python 3.
#
# Now that Python 3.7 is the minimum, this can in theory be done:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3866
result = result.decode(encoding)
result = result.decode(encoding)
return result

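As the comment in quote_output notes, on Python 3.7+ the terminal-encoding worry can largely be delegated to the stream itself. A minimal sketch of that approach, separate from this patch (ticket:3866 tracks actually doing it):

    import sys

    # reconfigure() exists on text streams since Python 3.7; with
    # errors="backslashreplace", characters the terminal encoding cannot
    # represent are escaped instead of raising UnicodeEncodeError.
    if hasattr(sys.stdout, "reconfigure"):
        sys.stdout.reconfigure(errors="backslashreplace")

    print("snowman: \u2603")  # prints an escape on terminals that cannot encode it
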
@ -4,13 +4,6 @@ Hashing utilities.
Ported to Python 3.
"""

from future.utils import PY2
if PY2:
# Don't import bytes to prevent leaking future's bytes.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes # noqa: F401
else:
future_bytes = bytes

from past.builtins import chr as byteschr

import os
@ -246,7 +239,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):


def _xor(a, b):
return b"".join([byteschr(c ^ b) for c in future_bytes(a)])
return b"".join([byteschr(c ^ b) for c in bytes(a)])


def hmac(tag, data):

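For context on the _xor hunk: on Python 3, iterating a bytes object already yields integers, so wrapping `a` in future's bytes is redundant. A standalone sketch of the same idea without the past.builtins chr helper (illustrative only, not the project's code):

    def xor_with_byte(data: bytes, key: int) -> bytes:
        # XOR every byte of `data` with the single integer `key`;
        # bytes([n]) stands in for past.builtins.chr here.
        return b"".join(bytes([c ^ key]) for c in data)

    assert xor_with_byte(b"\x00\xff\x0f", 0x0f) == b"\x0f\xf0\x00"
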
@ -104,7 +104,7 @@ def get_local_addresses_sync():
on the local system.
"""
return list(
native_str(address[native_str("addr")])
native_str(address["addr"])
for iface_name
in interfaces()
for address
@ -161,7 +161,7 @@ def _foolscapEndpointForPortNumber(portnum):
# approach is error prone for the reasons described on
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2787
portnum = allocate_tcp_port()
return (portnum, native_str("tcp:%d" % (portnum,)))
return (portnum, "tcp:%d" % portnum)


@implementer(IStreamServerEndpoint)
@ -210,7 +210,7 @@ def listenOnUnused(tub, portnum=None):
"""
portnum, endpoint = _foolscapEndpointForPortNumber(portnum)
tub.listenOn(endpoint)
tub.setLocation(native_str("localhost:%d" % (portnum,)))
tub.setLocation("localhost:%d" % portnum)
return portnum

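The "tcp:%d" value built above is an ordinary Twisted server-endpoint description string, so a plain str is all that is needed once native_str goes away. A hedged sketch of how such a description is normally consumed (serverFromString is Twisted's standard parser for these strings; the port number here is made up):

    from twisted.internet import reactor
    from twisted.internet.endpoints import serverFromString

    # Parse a "tcp:<port>" description into an IStreamServerEndpoint;
    # listenOnUnused() above hands an equivalent string to Tub.listenOn().
    endpoint = serverFromString(reactor, "tcp:12345")
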
@ -4,7 +4,6 @@ Logging utilities.
Ported to Python 3.
"""

from future.utils import PY2
from six import ensure_str

from pyutil import nummedobj
@ -12,14 +11,10 @@ from pyutil import nummedobj
from foolscap.logging import log
from twisted.python import log as tw_log

if PY2:
def bytes_to_unicode(ign, obj):
return obj
else:
# We want to convert bytes keys to Unicode, otherwise JSON serialization
# inside foolscap will fail (for details see
# https://github.com/warner/foolscap/issues/88)
from .jsonbytes import bytes_to_unicode
# We want to convert bytes keys to Unicode, otherwise JSON serialization
# inside foolscap will fail (for details see
# https://github.com/warner/foolscap/issues/88)
from .jsonbytes import bytes_to_unicode


NOISY = log.NOISY # 10

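Conceptually, the bytes_to_unicode helper imported here rewrites any bytes found in a log event so foolscap's JSON serialization does not fail (foolscap issue #88). A simplified, hypothetical illustration of the idea; the real implementation lives in allmydata.util.jsonbytes and handles more cases:

    def _decode_bytes(obj):
        # Recursively turn bytes into str so the structure is JSON-serializable.
        if isinstance(obj, bytes):
            return obj.decode("utf-8", "replace")
        if isinstance(obj, dict):
            return {_decode_bytes(k): _decode_bytes(v) for k, v in obj.items()}
        if isinstance(obj, (list, tuple)):
            return [_decode_bytes(x) for x in obj]
        return obj

    assert _decode_bytes({b"si": b"abc"}) == {"si": "abc"}
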
@ -4,8 +4,6 @@ Netstring encoding and decoding.
Ported to Python 3.
"""

from past.builtins import long

try:
from typing import Optional, Tuple, List # noqa: F401
except ImportError:
@ -27,7 +25,7 @@ def split_netstring(data, numstrings,
data does not exactly equal 'required_trailer'."""
assert isinstance(data, bytes)
assert required_trailer is None or isinstance(required_trailer, bytes)
assert isinstance(position, (int, long)), (repr(position), type(position))
assert isinstance(position, int), (repr(position), type(position))
elements = []
assert numstrings >= 0
while position < len(data):

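For reference, the strings being split here use the classic netstring framing, <length>:<data>, and position is now a plain int index into the data. A small illustrative sketch of the framing (not necessarily the module's exact helpers):

    def netstring(data: bytes) -> bytes:
        # Encode one netstring: b"hello" becomes b"5:hello,".
        return b"%d:%s," % (len(data), data)

    encoded = netstring(b"hello") + netstring(b"world")
    assert encoded == b"5:hello,5:world,"
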
@ -2,8 +2,6 @@
Ported to Python 3.
"""

from past.builtins import long

import itertools
import hashlib
import re
@ -1393,7 +1391,7 @@ class StatusElement(Element):
size = op.get_size()
if size is None:
size = "(unknown)"
elif isinstance(size, (int, long, float)):
elif isinstance(size, (int, float)):
size = abbreviate_size(size)

result["total_size"] = size

@ -1,7 +1,3 @@

from future.utils import PY3
from past.builtins import unicode

# This code isn't loadable or sensible except on Windows. Importers all know
# this and are careful. Normally I would just let an import error from ctypes
# explain any mistakes but Mypy also needs some help here. This assert
@ -15,104 +11,19 @@ from past.builtins import unicode
import sys
assert sys.platform == "win32"

import codecs
from functools import partial

from ctypes import WINFUNCTYPE, windll, POINTER, c_int, WinError, byref, get_last_error
from ctypes.wintypes import BOOL, HANDLE, DWORD, LPWSTR, LPCWSTR, LPVOID

# <https://msdn.microsoft.com/en-us/library/ms680621%28VS.85%29.aspx>
from win32api import (
STD_OUTPUT_HANDLE,
STD_ERROR_HANDLE,
SetErrorMode,

# <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
# HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
# returns INVALID_HANDLE_VALUE, NULL, or a valid handle
GetStdHandle,
)
from win32con import (
SEM_FAILCRITICALERRORS,
SEM_NOOPENFILEERRORBOX,
)

from win32file import (
INVALID_HANDLE_VALUE,
FILE_TYPE_CHAR,

# <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
# DWORD WINAPI GetFileType(DWORD hFile);
GetFileType,
)

from allmydata.util import (
log,
)

# Keep track of whether `initialize` has run so we don't do any of the
# initialization more than once.
_done = False

#
# pywin32 for Python 2.7 does not bind any of these *W variants so we do it
# ourselves.
#

# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms687401%28v=vs.85%29.aspx>
# BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
# LPDWORD lpCharsWritten, LPVOID lpReserved);
WriteConsoleW = WINFUNCTYPE(
BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID,
use_last_error=True
)(("WriteConsoleW", windll.kernel32))

# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms683156%28v=vs.85%29.aspx>
GetCommandLineW = WINFUNCTYPE(
LPWSTR,
use_last_error=True
)(("GetCommandLineW", windll.kernel32))

# <https://msdn.microsoft.com/en-us/library/windows/desktop/bb776391%28v=vs.85%29.aspx>
CommandLineToArgvW = WINFUNCTYPE(
POINTER(LPWSTR), LPCWSTR, POINTER(c_int),
use_last_error=True
)(("CommandLineToArgvW", windll.shell32))

# <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);
GetConsoleMode = WINFUNCTYPE(
BOOL, HANDLE, POINTER(DWORD),
use_last_error=True
)(("GetConsoleMode", windll.kernel32))


STDOUT_FILENO = 1
STDERR_FILENO = 2

def get_argv():
"""
:return [unicode]: The argument list this process was invoked with, as
unicode.

Python 2 does not do a good job exposing this information in
``sys.argv`` on Windows so this code re-retrieves the underlying
information using Windows API calls and massages it into the right
shape.
"""
command_line = GetCommandLineW()
argc = c_int(0)
argv_unicode = CommandLineToArgvW(command_line, byref(argc))
if argv_unicode is None:
raise WinError(get_last_error())

# Convert it to a normal Python list
return list(
argv_unicode[i]
for i
in range(argc.value)
)


def initialize():
global _done
@ -122,188 +33,3 @@ def initialize():
_done = True

SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX)

if PY3:
# The rest of this appears to be Python 2-specific
return

original_stderr = sys.stderr

# If any exception occurs in this code, we'll probably try to print it on stderr,
# which makes for frustrating debugging if stderr is directed to our wrapper.
# So be paranoid about catching errors and reporting them to original_stderr,
# so that we can at least see them.
def _complain(output_file, message):
print(isinstance(message, str) and message or repr(message), file=output_file)
log.msg(message, level=log.WEIRD)

_complain = partial(_complain, original_stderr)

# Work around <http://bugs.python.org/issue6058>.
codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)

# Make Unicode console output work independently of the current code page.
# This also fixes <http://bugs.python.org/issue1602>.
# Credit to Michael Kaplan <https://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
# and TZOmegaTZIOY
# <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
try:
old_stdout_fileno = None
old_stderr_fileno = None
if hasattr(sys.stdout, 'fileno'):
old_stdout_fileno = sys.stdout.fileno()
if hasattr(sys.stderr, 'fileno'):
old_stderr_fileno = sys.stderr.fileno()

real_stdout = (old_stdout_fileno == STDOUT_FILENO)
real_stderr = (old_stderr_fileno == STDERR_FILENO)

if real_stdout:
hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
if not a_console(hStdout):
real_stdout = False

if real_stderr:
hStderr = GetStdHandle(STD_ERROR_HANDLE)
if not a_console(hStderr):
real_stderr = False

if real_stdout:
sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '<Unicode console stdout>', _complain)
else:
sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '<Unicode redirected stdout>', _complain)

if real_stderr:
sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>', _complain)
else:
sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>', _complain)
except Exception as e:
_complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))

argv = list(arg.encode("utf-8") for arg in get_argv())

# Take only the suffix with the same number of arguments as sys.argv.
# This accounts for anything that can cause initial arguments to be stripped,
# for example, the Python interpreter or any options passed to it, or runner
# scripts such as 'coverage run'. It works even if there are no such arguments,
# as in the case of a frozen executable created by bb-freeze or similar.
#
# Also, modify sys.argv in place. If any code has already taken a
# reference to the original argument list object then this ensures that
# code sees the new values. This reliance on mutation of shared state is,
# of course, awful. Why does this function even modify sys.argv? Why not
# have a function that *returns* the properly initialized argv as a new
# list? I don't know.
#
# At least Python 3 gets sys.argv correct so before very much longer we
# should be able to fix this bad design by deleting it.
sys.argv[:] = argv[-len(sys.argv):]

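The suffix trick at the end of the removed block is easier to see with a concrete, made-up example: Windows reports the full command line, Python has already dropped the interpreter and its options from sys.argv, and taking the last len(sys.argv) items lines the two views up again:

    # Hypothetical values, for illustration only.
    full_argv = ["python", "-m", "coverage", "run", "tahoe", "start"]  # as reported by CommandLineToArgvW
    sys_argv = ["tahoe", "start"]                                      # what Python kept

    assert full_argv[-len(sys_argv):] == ["tahoe", "start"]
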
def a_console(handle):
"""
:return: ``True`` if ``handle`` refers to a console, ``False`` otherwise.
"""
if handle == INVALID_HANDLE_VALUE:
return False
return (
# It's a character file (eg a printer or a console)
GetFileType(handle) == FILE_TYPE_CHAR and
# Checking the console mode doesn't fail (thus it's a console)
GetConsoleMode(handle, byref(DWORD())) != 0
)


class UnicodeOutput(object):
"""
``UnicodeOutput`` is a file-like object that encodes unicode to UTF-8 and
writes it to another file or writes unicode natively to the Windows
console.
"""
def __init__(self, hConsole, stream, fileno, name, _complain):
"""
:param hConsole: ``None`` or a handle on the console to which to write
unicode. Mutually exclusive with ``stream``.

:param stream: ``None`` or a file-like object to which to write bytes.

:param fileno: A result to hand back from method of the same name.

:param name: A human-friendly identifier for this output object.

:param _complain: A one-argument callable which accepts bytes to be
written when there's a problem. Care should be taken to not make
this do a write on this object.
"""
self._hConsole = hConsole
self._stream = stream
self._fileno = fileno
self.closed = False
self.softspace = False
self.mode = 'w'
self.encoding = 'utf-8'
self.name = name

self._complain = _complain

from allmydata.util.encodingutil import canonical_encoding
from allmydata.util import log
if hasattr(stream, 'encoding') and canonical_encoding(stream.encoding) != 'utf-8':
log.msg("%s: %r had encoding %r, but we're going to write UTF-8 to it" %
(name, stream, stream.encoding), level=log.CURIOUS)
self.flush()

def isatty(self):
return False
def close(self):
# don't really close the handle, that would only cause problems
self.closed = True
def fileno(self):
return self._fileno
def flush(self):
if self._hConsole is None:
try:
self._stream.flush()
except Exception as e:
self._complain("%s.flush: %r from %r" % (self.name, e, self._stream))
raise

def write(self, text):
try:
if self._hConsole is None:
# There is no Windows console available. That means we are
# responsible for encoding the unicode to a byte string to
# write it to a Python file object.
if isinstance(text, unicode):
text = text.encode('utf-8')
self._stream.write(text)
else:
# There is a Windows console available. That means Windows is
# responsible for dealing with the unicode itself.
if not isinstance(text, unicode):
text = str(text).decode('utf-8')
remaining = len(text)
while remaining > 0:
n = DWORD(0)
# There is a shorter-than-documented limitation on the
# length of the string passed to WriteConsoleW (see
# #1232).
retval = WriteConsoleW(self._hConsole, text, min(remaining, 10000), byref(n), None)
if retval == 0:
raise IOError("WriteConsoleW failed with WinError: %s" % (WinError(get_last_error()),))
if n.value == 0:
raise IOError("WriteConsoleW returned %r, n.value = 0" % (retval,))
remaining -= n.value
if remaining == 0: break
text = text[n.value:]
except Exception as e:
self._complain("%s.write: %r" % (self.name, e))
raise

def writelines(self, lines):
try:
for line in lines:
self.write(line)
except Exception as e:
self._complain("%s.writelines: %r" % (self.name, e))
raise