Merge pull request #1361 from a-detiste/remove-future

Remove most of the usage of future

Fixes ticket:4093
Itamar Turner-Trauring 2024-03-01 13:56:55 -05:00 committed by GitHub
commit 432e041d9d
58 changed files with 148 additions and 687 deletions
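
The diffs that follow are almost entirely mechanical. As a rough guide to the pattern (an illustrative Python sketch, not code copied from any single file below), the compatibility-layer idioms being removed map onto the standard library like this:

# Illustrative Python 3 equivalents for the removed compatibility idioms.
from io import StringIO            # replaces six.moves.StringIO / cStringIO
from unittest import mock          # replaces the standalone "mock" package

def to_text(value):
    # past.builtins.unicode is just str; six.binary_type is just bytes.
    if isinstance(value, bytes):
        return str(value, "utf-8")
    return value

# past.builtins.long is just int, so (int, long) checks collapse to int:
assert isinstance(10**13, int)

# PY2/PY3 branches keep only their Python 3 side, e.g. writing bytes to stdout:
import sys
stdout = getattr(sys.stdout, "buffer", sys.stdout)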

View File

@@ -24,7 +24,7 @@
 import os, sys, subprocess, json, tempfile, zipfile, re, itertools
 import email.parser
 from pprint import pprint
-from six.moves import StringIO
+from io import StringIO
 import click

 all_packages = {} # name -> version

newsfragments/4093.minor (new, empty file)
View File

View File

@@ -4,13 +4,5 @@ Monkey-patching of third party libraries.
 Ported to Python 3.
 """

-from future.utils import PY2

 def patch():
     """Path third-party libraries to make Tahoe-LAFS work."""
-    if not PY2:
-        # Python 3 doesn't need to monkey patch Foolscap
-        return

View File

@@ -1,8 +1,6 @@
 """Ported to Python 3.
 """

-from past.builtins import unicode

 from zope.interface import implementer
 from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
      IDeepCheckResults, IDeepCheckAndRepairResults, IURI, IDisplayableServer
@@ -63,8 +61,8 @@ class CheckResults(object):
         # On Python 2, we can mix bytes and Unicode. On Python 3, we want
         # unicode.
         if isinstance(summary, bytes):
-            summary = unicode(summary, "utf-8")
-        assert isinstance(summary, unicode) # should be a single string
+            summary = str(summary, "utf-8")
+        assert isinstance(summary, str) # should be a single string
         self._summary = summary
         assert not isinstance(report, str) # should be list of strings
         self._report = report

View File

@@ -10,8 +10,6 @@ objects that `cryptography` documents.
 Ported to Python 3.
 """

-import six

 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.ciphers import (
     Cipher,
@@ -79,7 +77,7 @@ def encrypt_data(encryptor, plaintext):
     """
     _validate_cryptor(encryptor, encrypt=True)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')
     return encryptor.update(plaintext)
@@ -118,7 +116,7 @@ def decrypt_data(decryptor, plaintext):
     """
     _validate_cryptor(decryptor, encrypt=False)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')
     return decryptor.update(plaintext)
@@ -160,7 +158,7 @@ def _validate_key(key):
     """
     confirm `key` is suitable for AES encryption, or raise ValueError
     """
-    if not isinstance(key, six.binary_type):
+    if not isinstance(key, bytes):
         raise TypeError('Key must be bytes')
     if len(key) not in (16, 32):
         raise ValueError('Key must be 16 or 32 bytes long')
@@ -175,7 +173,7 @@ def _validate_iv(iv):
     """
     if iv is None:
         return DEFAULT_IV
-    if not isinstance(iv, six.binary_type):
+    if not isinstance(iv, bytes):
         raise TypeError('IV must be bytes')
     if len(iv) != 16:
         raise ValueError('IV must be 16 bytes long')
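
The helpers above share the same bytes-only validation pattern. A minimal standalone sketch of the key check (the function name here is hypothetical; the real code is the _validate_key shown in the hunk above):

def validate_aes_key(key):
    # Mirror the checks above: the key must be bytes, sized for AES-128 or AES-256.
    if not isinstance(key, bytes):
        raise TypeError('Key must be bytes')
    if len(key) not in (16, 32):
        raise ValueError('Key must be 16 or 32 bytes long')
    return key

validate_aes_key(b"\x00" * 32)    # accepted
# validate_aes_key(u"not bytes") would raise TypeError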

View File

@@ -45,9 +45,6 @@ noisy = True
 from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \
      msg as logmsg, PrefixingLogMixin

-if six.PY3:
-    long = int

 def createSFTPError(errorCode, errorMessage):
     """

View File

@@ -4,8 +4,6 @@ Ported to Python 3.
 from __future__ import annotations

-from future.utils import native_str
-from past.builtins import long, unicode
 from six import ensure_str

 import os, time, weakref, itertools
@@ -57,7 +55,7 @@ from eliot import (
 _TOTAL_SHARES = Field.for_types(
     u"total_shares",
-    [int, long],
+    [int],
     u"The total number of shares desired.",
 )
@@ -104,7 +102,7 @@ _HAPPINESS_MAPPINGS = Field(
 _HAPPINESS = Field.for_types(
     u"happiness",
-    [int, long],
+    [int],
     u"The computed happiness of a certain placement.",
 )
@@ -142,7 +140,7 @@ GET_SHARE_PLACEMENTS = MessageType(
 _EFFECTIVE_HAPPINESS = Field.for_types(
     u"effective_happiness",
-    [int, long],
+    [int],
     u"The computed happiness value of a share placement map.",
 )
@@ -166,7 +164,7 @@ class HelperUploadResults(Copyable, RemoteCopy):
     # package/module/class name
     #
     # Needs to be native string to make Foolscap happy.
-    typeToCopy = native_str("allmydata.upload.UploadResults.tahoe.allmydata.com")
+    typeToCopy = "allmydata.upload.UploadResults.tahoe.allmydata.com"
     copytype = typeToCopy

     # also, think twice about changing the shape of any existing attribute,
@@ -1622,7 +1620,7 @@ class AssistedUploader(object):
         # abbreviated), so if we detect old results, just clobber them.
         sharemap = upload_results.sharemap
-        if any(isinstance(v, (bytes, unicode)) for v in sharemap.values()):
+        if any(isinstance(v, (bytes, str)) for v in sharemap.values()):
             upload_results.sharemap = None

     def _build_verifycap(self, helper_upload_results):
@@ -1701,7 +1699,7 @@ class BaseUploadable(object):
     def set_default_encoding_parameters(self, default_params):
         assert isinstance(default_params, dict)
         for k,v in default_params.items():
-            precondition(isinstance(k, (bytes, unicode)), k, v)
+            precondition(isinstance(k, (bytes, str)), k, v)
             precondition(isinstance(v, int), k, v)
         if "k" in default_params:
             self.default_encoding_param_k = default_params["k"]

View File

@@ -6,9 +6,6 @@ Ported to Python 3.
 Note that for RemoteInterfaces, the __remote_name__ needs to be a native string because of https://github.com/warner/foolscap/blob/43f4485a42c9c28e2c79d655b3a9e24d4e6360ca/src/foolscap/remoteinterface.py#L67
 """

-from future.utils import native_str
-from past.builtins import long

 from typing import Dict

 from zope.interface import Interface, Attribute
@@ -112,7 +109,7 @@ ReadData = ListOf(ShareData)

 class RIStorageServer(RemoteInterface):
-    __remote_name__ = native_str("RIStorageServer.tahoe.allmydata.com")
+    __remote_name__ = "RIStorageServer.tahoe.allmydata.com"

     def get_version():
         """
@@ -2768,13 +2765,13 @@ UploadResults = Any() #DictOf(bytes, bytes)

 class RIEncryptedUploadable(RemoteInterface):
-    __remote_name__ = native_str("RIEncryptedUploadable.tahoe.allmydata.com")
+    __remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com"

     def get_size():
         return Offset
     def get_all_encoding_parameters():
-        return (int, int, int, long)
+        return (int, int, int, int)
     def read_encrypted(offset=Offset, length=ReadSize):
         return ListOf(bytes)
@@ -2784,7 +2781,7 @@ class RIEncryptedUploadable(RemoteInterface):

 class RICHKUploadHelper(RemoteInterface):
-    __remote_name__ = native_str("RIUploadHelper.tahoe.allmydata.com")
+    __remote_name__ = "RIUploadHelper.tahoe.allmydata.com"

     def get_version():
         """
@@ -2797,7 +2794,7 @@ class RICHKUploadHelper(RemoteInterface):

 class RIHelper(RemoteInterface):
-    __remote_name__ = native_str("RIHelper.tahoe.allmydata.com")
+    __remote_name__ = "RIHelper.tahoe.allmydata.com"

     def get_version():
         """

View File

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """

-from past.builtins import long
 from six import ensure_text, ensure_str

 import time
@@ -304,7 +302,7 @@ class IntroducerClient(service.Service, Referenceable):
         if "seqnum" in old:
             # must beat previous sequence number to replace
             if ("seqnum" not in ann
-                or not isinstance(ann["seqnum"], (int,long))):
+                or not isinstance(ann["seqnum"], int)):
                 self.log("not replacing old announcement, no valid seqnum: %s"
                          % (ann,),
                          parent=lp2, level=log.NOISY, umid="zFGH3Q")

View File

@@ -2,9 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import native_str

 from zope.interface import Interface
 from foolscap.api import StringConstraint, SetOf, DictOf, Any, \
      RemoteInterface, Referenceable
@@ -34,7 +31,7 @@ FURL = StringConstraint(1000)
 Announcement_v2 = Any()

 class RIIntroducerSubscriberClient_v2(RemoteInterface):
-    __remote_name__ = native_str("RIIntroducerSubscriberClient_v2.tahoe.allmydata.com")
+    __remote_name__ = "RIIntroducerSubscriberClient_v2.tahoe.allmydata.com"

     def announce_v2(announcements=SetOf(Announcement_v2)):
         """I accept announcements from the publisher."""
@@ -47,11 +44,14 @@ class RIIntroducerPublisherAndSubscriberService_v2(RemoteInterface):
     announcement message. I will deliver a copy to all connected subscribers.
     To hear about services, connect to me and subscribe to a specific
     service_name."""
-    __remote_name__ = native_str("RIIntroducerPublisherAndSubscriberService_v2.tahoe.allmydata.com")
+    __remote_name__ = "RIIntroducerPublisherAndSubscriberService_v2.tahoe.allmydata.com"

     def get_version():
         return DictOf(bytes, Any())
     def publish_v2(announcement=Announcement_v2, canary=Referenceable):
         return None
     def subscribe_v2(subscriber=RIIntroducerSubscriberClient_v2,
                      service_name=bytes, subscriber_info=SubscriberInfo):
         """Give me a subscriber reference, and I will call its announce_v2()

View File

@@ -4,7 +4,6 @@ Ported to Python 3.
 from __future__ import annotations

-from past.builtins import long
 from six import ensure_text

 import time, os.path, textwrap
@@ -262,7 +261,7 @@ class IntroducerService(service.MultiService, Referenceable):  # type: ignore[mi
         if "seqnum" in old_ann:
             # must beat previous sequence number to replace
             if ("seqnum" not in ann
-                or not isinstance(ann["seqnum"], (int,long))):
+                or not isinstance(ann["seqnum"], int)):
                 self.log("not replacing old ann, no valid seqnum",
                          level=log.NOISY, umid="ySbaVw")
                 self._debug_counts["inbound_no_seqnum"] += 1

View File

@@ -112,8 +112,8 @@ def formatTimeTahoeStyle(self, when):
     """
     d = datetime.datetime.utcfromtimestamp(when)
     if d.microsecond:
-        return d.isoformat(ensure_str(" "))[:-3]+"Z"
-    return d.isoformat(ensure_str(" ")) + ".000Z"
+        return d.isoformat(" ")[:-3]+"Z"
+    return d.isoformat(" ") + ".000Z"

 PRIV_README = """
 This directory contains files which contain private data for the Tahoe node,

View File

@@ -3,7 +3,6 @@ Ported to Python 3.
 """
 import sys

-import six

 from allmydata.util.assertutil import precondition
 from allmydata.util.fileutil import abspath_expanduser_unicode
@@ -13,10 +12,10 @@ if sys.platform == 'win32':
     from allmydata.windows import registry
     path = registry.get_base_dir_path()
     if path:
-        precondition(isinstance(path, six.text_type), path)
+        precondition(isinstance(path, str), path)
         _default_nodedir = abspath_expanduser_unicode(path)

 if _default_nodedir is None:
-    path = abspath_expanduser_unicode(u"~/.tahoe")
-    precondition(isinstance(path, six.text_type), path)
+    path = abspath_expanduser_unicode("~/.tahoe")
+    precondition(isinstance(path, str), path)
     _default_nodedir = path

View File

@@ -65,8 +65,8 @@ class Options(usage.Options):
     ]
     optParameters = [
         ["node-directory", "d", None, NODEDIR_HELP],
-        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type],
-        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type],
+        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", str],
+        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", str],
     ]

     def opt_version(self):
@@ -262,7 +262,7 @@ def _setup_coverage(reactor, argv):
     # can we put this _setup_coverage call after we hit
     # argument-parsing?
     # ensure_str() only necessary on Python 2.
-    if six.ensure_str('--coverage') not in sys.argv:
+    if '--coverage' not in sys.argv:
         return
     argv.remove('--coverage')

View File

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
 from six import ensure_str

 import os, time
@@ -81,8 +79,6 @@ class SlowOperationRunner(object):
         if not data["finished"]:
             return False
         if self.options.get("raw"):
-            if PY3:
-                # need to write bytes!
             stdout = stdout.buffer
             if is_printable_ascii(jdata):
                 stdout.write(jdata)

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six import ensure_str, ensure_text
+from six import ensure_text

 from urllib.parse import quote as url_quote
 import json
@@ -168,7 +168,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
         # LIT files and directories do not have a "summary" field.
         summary = cr.get("summary", "Healthy (LIT)")
         # When Python 2 is dropped the ensure_text()/ensure_str() will be unnecessary.
-        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), quote_output(summary, quotemarks=False)),
+        print(ensure_text("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)),
                           encoding=get_io_encoding()), file=stdout)

         # always print out corrupt shares
@@ -246,13 +246,11 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
         if not path:
             path = ["<root>"]
         # we don't seem to have a summary available, so build one
-        # When Python 2 is dropped the ensure_text/ensure_str crap can be
-        # dropped.
         if was_healthy:
-            summary = ensure_str("healthy")
+            summary = "healthy"
         else:
-            summary = ensure_str("not healthy")
-        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), summary),
+            summary = "not healthy"
+        print(ensure_text("%s: %s" % (quote_path(path), summary),
                           encoding=get_io_encoding()), file=stdout)

         # always print out corrupt shares

View File

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3

 from urllib.parse import quote as url_quote
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
      UnknownAliasError
@@ -36,7 +34,7 @@ def get(options):
         outf = stdout
         # Make sure we can write bytes; on Python 3 stdout is Unicode by
         # default.
-        if PY3 and getattr(outf, "encoding", None) is not None:
+        if getattr(outf, "encoding", None) is not None:
             outf = outf.buffer
         while True:
             data = resp.read(4096)

View File

@@ -2,10 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
-from six import ensure_str

 from urllib.parse import quote as url_quote
 import json
 from twisted.protocols.basic import LineOnlyReceiver
@@ -56,7 +52,6 @@ class ManifestStreamer(LineOnlyReceiver, object):
         # use Twisted to split this into lines
         self.in_error = False
         # Writing bytes, so need binary stdout.
-        if PY3:
         stdout = stdout.buffer
         while True:
             chunk = resp.read(100)
@@ -99,8 +94,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
             if vc:
                 print(quote_output(vc, quotemarks=False), file=stdout)
             else:
-                # ensure_str() only necessary for Python 2.
-                print(ensure_str("%s %s") % (
+                print("%s %s" % (
                     quote_output(d["cap"], quotemarks=False),
                     quote_path(d["path"], quotemarks=False)), file=stdout)

View File

@@ -24,13 +24,12 @@ def print(*args, **kwargs):
     encoding error handler and then write the result whereas builtin print
     uses the "strict" encoding error handler.
     """
-    from past.builtins import unicode
     out = kwargs.pop("file", None)
     if out is None:
         out = _sys_stdout
     encoding = out.encoding or "ascii"
     def ensafe(o):
-        if isinstance(o, unicode):
+        if isinstance(o, str):
             return o.encode(encoding, errors="replace").decode(encoding)
         return o
     return _print(

View File

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3

 import os.path
 from allmydata.util import base32
@@ -43,7 +41,5 @@ def storage_index_to_dir(storageindex):
     Returns native string.
     """
     sia = si_b2a(storageindex)
-    if PY3:
-        # On Python 3 we expect paths to be unicode.
     sia = sia.decode("ascii")
     return os.path.join(sia[:2], sia)

View File

@@ -4,9 +4,6 @@ Crawl the storage server shares.
 Ported to Python 3.
 """

-from future.utils import PY2, PY3

 import os
 import time
 import json
@@ -150,9 +147,6 @@ def _dump_json_to_file(js, afile):
     """
     with afile.open("wb") as f:
         data = json.dumps(js)
-        if PY2:
-            f.write(data)
-        else:
         f.write(data.encode("utf8"))
@@ -249,8 +243,6 @@ class ShareCrawler(service.MultiService):
         self._state_serializer = _LeaseStateSerializer(statefile)
         self.prefixes = [si_b2a(struct.pack(">H", i << (16-10)))[:2]
                          for i in range(2**10)]
-        if PY3:
-            # On Python 3 we expect the paths to be unicode, not bytes.
         self.prefixes = [p.decode("ascii") for p in self.prefixes]
         self.prefixes.sort()
         self.timer = None

View File

@@ -2,10 +2,8 @@
 Ported to Python 3.
 """

-from future.utils import PY2

 import os.path
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from datetime import timedelta
 import re
@@ -421,9 +419,6 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
             else:
                 return original_open(name, *args, **kwargs)

-        if PY2:
-            from allmydata.scripts import cli as module_to_patch
-        else:
         import builtins as module_to_patch
         patcher = MonkeyPatcher((module_to_patch, 'open', call_file))
         patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to'])

View File

@@ -4,7 +4,7 @@ Ported to Python 3.

 import sys
 import os.path, time
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.trial import unittest
 from allmydata.util import fileutil

View File

@@ -3,7 +3,7 @@ from six import ensure_text
 import os.path
 import json
 from twisted.trial import unittest
-from six.moves import cStringIO as StringIO
+from io import StringIO

 from allmydata import uri
 from allmydata.util import base32

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import cStringIO as StringIO
+from io import StringIO
 import re
 from six import ensure_text

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import StringIO
+from io import StringIO
 import os.path
 from twisted.trial import unittest
 from urllib.parse import quote as url_quote

View File

@@ -2,9 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
-from six import ensure_str

 from twisted.trial import unittest
 from twisted.internet import defer
@@ -12,7 +9,7 @@ from allmydata.immutable import upload
 from allmydata.interfaces import MDMF_VERSION, SDMF_VERSION
 from allmydata.mutable.publish import MutableData
 from ..no_network import GridTestMixin
-from allmydata.util.encodingutil import quote_output, get_io_encoding
+from allmydata.util.encodingutil import quote_output
 from .common import CLITestMixin
@@ -26,10 +23,6 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         good_arg = u"g\u00F6\u00F6d"
         good_out = u"g\u00F6\u00F6d"

-        # On Python 2 we get bytes, so we need encoded version. On Python 3
-        # stdio is unicode so can leave unchanged.
-        good_out_encoded = good_out if PY3 else good_out.encode(get_io_encoding())

         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n
@@ -52,7 +45,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 0)
             self.assertEqual(len(err), 0, err)
-            expected = sorted([ensure_str("0share"), ensure_str("1share"), good_out_encoded])
+            expected = sorted(["0share", "1share", good_out])
             self.assertEqual(sorted(out.splitlines()), expected)
         d.addCallback(_check1)
         d.addCallback(lambda ign: self.do_cli("ls", "missing"))
@@ -85,8 +78,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             # listing a file (as dir/filename) should have the edge metadata,
             # including the filename
             self.failUnlessReallyEqual(rc, 0)
-            self.failUnlessIn(good_out_encoded, out)
-            self.failIfIn(ensure_str("-r-- %d -" % len(small)), out,
+            self.failUnlessIn(good_out, out)
+            self.failIfIn("-r-- %d -" % len(small), out,
                           "trailing hyphen means unknown date")

         if good_arg is not None:

View File

@@ -5,7 +5,7 @@ Tests for ``allmydata.scripts.tahoe_run``.
 from __future__ import annotations

 import re
-from six.moves import (
+from io import (
     StringIO,
 )

View File

@@ -2,9 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY2, PY3, bchr, binary_type
-from future.builtins import str as future_str

 import os
 import sys
 import time
@@ -13,8 +10,6 @@ from functools import (
     partial,
 )
 from random import randrange

-if PY2:
-    from StringIO import StringIO
 from io import (
     TextIOWrapper,
     BytesIO,
@@ -28,6 +23,9 @@ from ..util.assertutil import precondition
 from ..scripts import runner
 from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding, argv_type, unicode_to_argv

+def bchr(s):
+    return bytes([s])

 def skip_if_cannot_represent_filename(u):
     precondition(isinstance(u, str))
@@ -66,13 +64,13 @@ def run_cli_native(verb, *args, **kwargs):
     :param runner.Options options: The options instance to use to parse the
         given arguments.

-    :param native_str verb: The command to run. For example,
+    :param str verb: The command to run. For example,
         ``"create-node"``.

-    :param [native_str] args: The arguments to pass to the command. For
+    :param [str] args: The arguments to pass to the command. For
         example, ``("--hostname=localhost",)``.

-    :param [native_str] nodeargs: Extra arguments to pass to the Tahoe
+    :param [str] nodeargs: Extra arguments to pass to the Tahoe
         executable before ``verb``.

     :param bytes|unicode stdin: Text or bytes to pass to the command via stdin.
@@ -101,22 +99,7 @@
     )
     argv = ["tahoe"] + nodeargs + [verb] + list(args)
     stdin = kwargs.get("stdin", "")
-    if PY2:
-        # The original behavior, the Python 2 behavior, is to accept either
-        # bytes or unicode and try to automatically encode or decode as
-        # necessary. This works okay for ASCII and if LANG is set
-        # appropriately. These aren't great constraints so we should move
-        # away from this behavior.
-        #
-        # The encoding attribute doesn't change StringIO behavior on Python 2,
-        # but it's there for realism of the emulation.
-        stdin = StringIO(stdin)
-        stdin.encoding = encoding
-        stdout = StringIO()
-        stdout.encoding = encoding
-        stderr = StringIO()
-        stderr.encoding = encoding
-    else:
+    if True:
         # The new behavior, the Python 3 behavior, is to accept unicode and
         # encode it using a specific encoding. For older versions of Python 3,
         # the encoding is determined from LANG (bad) but for newer Python 3,
@@ -146,13 +129,13 @@
         stderr=stderr,
     )
     def _done(rc, stdout=stdout, stderr=stderr):
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return 0, _getvalue(stdout), _getvalue(stderr)
     def _err(f, stdout=stdout, stderr=stderr):
         f.trap(SystemExit)
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return f.value.code, _getvalue(stdout), _getvalue(stderr)
@@ -182,17 +165,13 @@ def run_cli_unicode(verb, argv, nodeargs=None, stdin=None, encoding=None):
     if nodeargs is None:
         nodeargs = []
     precondition(
-        all(isinstance(arg, future_str) for arg in [verb] + nodeargs + argv),
+        all(isinstance(arg, str) for arg in [verb] + nodeargs + argv),
         "arguments to run_cli_unicode must be unicode",
         verb=verb,
         nodeargs=nodeargs,
         argv=argv,
     )
     codec = encoding or "ascii"
-    if PY2:
-        encode = lambda t: None if t is None else t.encode(codec)
-    else:
-        # On Python 3 command-line parsing expects Unicode!
     encode = lambda t: t
     d = run_cli_native(
         encode(verb),
@@ -238,7 +217,7 @@ def flip_bit(good, which):

 def flip_one_bit(s, offset=0, size=None):
     """ flip one random bit of the string s, in a byte greater than or equal to offset and less
     than offset+size. """
-    precondition(isinstance(s, binary_type))
+    precondition(isinstance(s, bytes))
     if size is None:
         size=len(s)-offset
     i = randrange(offset, offset+size)
@@ -250,13 +229,9 @@ def flip_one_bit(s, offset=0, size=None):

 class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
-        # Make sure unicode strings are a consistent type. Specifically there's
-        # Future newstr (backported Unicode type) vs. Python 2 native unicode
-        # type. They're equal, and _logically_ the same type, but have
-        # different types in practice.
-        if a.__class__ == future_str:
+        if a.__class__ == str:
             a = str(a)
-        if b.__class__ == future_str:
+        if b.__class__ == str:
             b = str(b)
         self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.internet import defer, reactor
 from ..common import AsyncBrokenTestCase
 from testtools.matchers import (

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import cStringIO as StringIO
+from io import StringIO
 from ..common import AsyncTestCase
 from testtools.matchers import Equals, HasLength, Contains
 from twisted.internet import defer

View File

@@ -4,8 +4,6 @@ Ported to Python 3.
-from future.utils import bchr
-from past.builtins import long

 from io import BytesIO
 import attr
 from twisted.internet import defer, reactor
@@ -129,8 +127,8 @@ class FakeStorageServer(object):
                     continue
                 vector = response[shnum] = []
                 for (offset, length) in readv:
-                    assert isinstance(offset, (int, long)), offset
-                    assert isinstance(length, (int, long)), length
+                    assert isinstance(offset, int), offset
+                    assert isinstance(length, int), length
                     vector.append(shares[shnum][offset:offset+length])
             return response
         d.addCallback(_read)

View File

@@ -5,7 +5,6 @@ functionality.
 Ported to Python 3.
 """

-from future.utils import native_str, native_str_to_bytes
 from six import ensure_str

 import attr
@@ -40,7 +39,7 @@ from allmydata.util.jsonbytes import (

 class RIDummy(RemoteInterface):
-    __remote_name__ = native_str("RIDummy.tahoe.allmydata.com")
+    __remote_name__ = "RIDummy.tahoe.allmydata.com"

     def just_some_method():
         """
@@ -87,7 +86,7 @@ class DummyStorage(object):
         """
         items = configuration.items(self._client_section_name, [])
         resource = Data(
-            native_str_to_bytes(dumps(dict(items))),
+            dumps(dict(items)).encode("utf-8"),
             ensure_str("text/json"),
         )
         # Give it some dynamic stuff too.
@@ -105,7 +104,7 @@ class GetCounter(Resource, object):
     value = 0
     def render_GET(self, request):
         self.value += 1
-        return native_str_to_bytes(dumps({"value": self.value}))
+        return dumps({"value": self.value}).encode("utf-8")

 @implementer(RIDummy)

View File

@@ -2,8 +2,6 @@
 This module has been ported to Python 3.
 """

-from future.utils import PY2

 import sys
 import random
@@ -31,7 +29,7 @@ class TestFlipOneBit(SyncTestCase):
     def test_accepts_byte_string(self):
         actual = flip_one_bit(b'foo')
-        self.assertEqual(actual, b'fno' if PY2 else b'fom')
+        self.assertEqual(actual, b'fom')

     def test_rejects_unicode_string(self):
         self.assertRaises(AssertionError, flip_one_bit, u'foo')

View File

@@ -5,8 +5,6 @@ Ported to Python 3.
 """

-from future.utils import PY3

 import time
 import os.path
 from twisted.trial import unittest
@@ -28,7 +26,6 @@ class BucketEnumeratingCrawler(ShareCrawler):
         self.all_buckets = []
         self.finished_d = defer.Deferred()
     def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
-        if PY3:
         # Bucket _inputs_ are bytes, and that's what we will compare this
         # to:
         storage_index_b32 = storage_index_b32.encode("ascii")
@@ -46,7 +43,6 @@ class PacedCrawler(ShareCrawler):
         self.finished_d = defer.Deferred()
         self.yield_cb = None
     def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
-        if PY3:
         # Bucket _inputs_ are bytes, and that's what we will compare this
         # to:
         storage_index_b32 = storage_index_b32.encode("ascii")

View File

@@ -1,6 +1,3 @@
-from future.utils import native_bytes

 import unittest
 from base64 import b64decode
@@ -40,7 +37,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_2048_PRIV_KEY = b64decode(f.read().strip())
-    assert isinstance(RSA_2048_PRIV_KEY, native_bytes)
+    assert isinstance(RSA_2048_PRIV_KEY, bytes)

     with RESOURCE_DIR.child('pycryptopp-rsa-2048-sig.txt').open('r') as f:
         # Signature created using `RSA_2048_PRIV_KEY` via:
@@ -61,7 +58,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_TINY_PRIV_KEY = b64decode(f.read().strip())
-    assert isinstance(RSA_TINY_PRIV_KEY, native_bytes)
+    assert isinstance(RSA_TINY_PRIV_KEY, bytes)

     with RESOURCE_DIR.child('pycryptopp-rsa-32768-priv.txt').open('r') as f:
         # Created using `pycryptopp`:
@@ -72,7 +69,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_HUGE_PRIV_KEY = b64decode(f.read().strip())
-    assert isinstance(RSA_HUGE_PRIV_KEY, native_bytes)
+    assert isinstance(RSA_HUGE_PRIV_KEY, bytes)

     def test_old_start_up_test(self):
         """
@@ -324,7 +321,7 @@ class TestEd25519(unittest.TestCase):
         private_key, public_key = ed25519.create_signing_keypair()
         private_key_str = ed25519.string_from_signing_key(private_key)
-        self.assertIsInstance(private_key_str, native_bytes)
+        self.assertIsInstance(private_key_str, bytes)
         private_key2, public_key2 = ed25519.signing_keypair_from_string(private_key_str)
@@ -340,7 +337,7 @@ class TestEd25519(unittest.TestCase):
         # ditto, but for the verifying keys
         public_key_str = ed25519.string_from_verifying_key(public_key)
-        self.assertIsInstance(public_key_str, native_bytes)
+        self.assertIsInstance(public_key_str, bytes)
         public_key2 = ed25519.verifying_key_from_string(public_key_str)
         self.assertEqual(
@@ -444,7 +441,7 @@ class TestRsa(unittest.TestCase):
         priv_key, pub_key = rsa.create_signing_keypair(2048)
         priv_key_str = rsa.der_string_from_signing_key(priv_key)
-        self.assertIsInstance(priv_key_str, native_bytes)
+        self.assertIsInstance(priv_key_str, bytes)
         priv_key2, pub_key2 = rsa.create_signing_keypair_from_string(priv_key_str)

View File

@@ -3,8 +3,6 @@
 Ported to Python 3.
 """

-from past.builtins import long

 import time
 import unicodedata
 from zope.interface import implementer
@@ -1854,7 +1852,7 @@ class DeepStats(testutil.ReallyEqualMixin, unittest.TestCase):
             (101, 316, 216),
             (317, 1000, 684),
             (1001, 3162, 99),
-            (long(3162277660169), long(10000000000000), 1),
+            (3162277660169, 10000000000000, 1),
             ])

 class UCWEingMutableFileNode(MutableFileNode):

View File

@@ -10,7 +10,6 @@ from future.utils import bchr
 from typing import Any

-import six
 import os
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
@@ -30,9 +29,6 @@ from allmydata.immutable.downloader.fetcher import SegmentFetcher
 from allmydata.codec import CRSDecoder
 from foolscap.eventual import eventually, fireEventually, flushEventualQueue

-if six.PY3:
-    long = int

 plaintext = b"This is a moderate-sized file.\n" * 10
 mutable_plaintext = b"This is a moderate-sized mutable file.\n" * 10

View File

@@ -1,8 +1,4 @@
-from future.utils import PY2, PY3
-from past.builtins import unicode

 lumiere_nfc = u"lumi\u00E8re"
 Artonwall_nfc = u"\u00C4rtonwall.mp3"
 Artonwall_nfd = u"A\u0308rtonwall.mp3"
@@ -46,12 +42,6 @@ if __name__ == "__main__":
         for fname in TEST_FILENAMES:
             open(os.path.join(tmpdir, fname), 'w').close()

-        # On Python 2, listing directories returns unicode under Windows or
-        # MacOS X if the input is unicode. On Python 3, it always returns
-        # Unicode.
-        if PY2 and sys.platform in ('win32', 'darwin'):
-            dirlist = os.listdir(unicode(tmpdir))
-        else:
         dirlist = os.listdir(tmpdir)

         print(" dirlist = %s" % repr(dirlist))
@@ -64,7 +54,6 @@
 import os, sys

-from unittest import skipIf

 from twisted.trial import unittest
@@ -76,7 +65,7 @@ from allmydata.test.common_util import (
 from allmydata.util import encodingutil, fileutil
 from allmydata.util.encodingutil import unicode_to_url, \
     unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
-    quote_filepath, unicode_platform, listdir_unicode, FilenameEncodingError, \
+    quote_filepath, unicode_platform, listdir_unicode, \
     get_filesystem_encoding, to_bytes, from_utf8_or_none, _reload, \
     to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \
     unicode_to_argv
@@ -84,47 +73,6 @@ from allmydata.util.encodingutil import unicode_to_url, \

 class MockStdout(object):
     pass

-# The following tests apply only to platforms that don't store filenames as
-# Unicode entities on the filesystem.
-class EncodingUtilNonUnicodePlatform(unittest.TestCase):
-    @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
-    def setUp(self):
-        # Make sure everything goes back to the way it was at the end of the
-        # test.
-        self.addCleanup(_reload)
-
-        # Mock sys.platform because unicode_platform() uses it. Cleanups run
-        # in reverse order so we do this second so it gets undone first.
-        self.patch(sys, "platform", "linux")
-
-    def test_listdir_unicode(self):
-        # What happens if latin1-encoded filenames are encountered on an UTF-8
-        # filesystem?
-        def call_os_listdir(path):
-            return [
-                lumiere_nfc.encode('utf-8'),
-                lumiere_nfc.encode('latin1')
-            ]
-        self.patch(os, 'listdir', call_os_listdir)
-
-        sys_filesystemencoding = 'utf-8'
-        def call_sys_getfilesystemencoding():
-            return sys_filesystemencoding
-        self.patch(sys, 'getfilesystemencoding', call_sys_getfilesystemencoding)
-
-        _reload()
-        self.failUnlessRaises(FilenameEncodingError,
-                              listdir_unicode,
-                              u'/dummy')
-
-        # We're trying to list a directory whose name cannot be represented in
-        # the filesystem encoding. This should fail.
-        sys_filesystemencoding = 'ascii'
-        _reload()
-        self.failUnlessRaises(FilenameEncodingError,
-                              listdir_unicode,
-                              u'/' + lumiere_nfc)

 class EncodingUtil(ReallyEqualMixin):
     def setUp(self):
@@ -143,10 +91,7 @@ class EncodingUtil(ReallyEqualMixin):
         converts to bytes using UTF-8 elsewhere.
         """
         result = unicode_to_argv(lumiere_nfc)
-        if PY3 or self.platform == "win32":
         expected_value = lumiere_nfc
-        else:
-            expected_value = lumiere_nfc.encode(self.io_encoding)
         self.assertIsInstance(result, type(expected_value))
         self.assertEqual(result, expected_value)
@@ -167,9 +112,6 @@ class EncodingUtil(ReallyEqualMixin):
                    % (self.filesystem_encoding,))

         def call_os_listdir(path):
-            if PY2:
-                return self.dirlist
-            else:
             # Python 3 always lists unicode filenames:
             return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
                     else d
@@ -204,9 +146,6 @@ class StdlibUnicode(unittest.TestCase):
         fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
         open(fn, 'wb').close()
         self.failUnless(os.path.exists(fn))
-        if PY2:
-            getcwdu = os.getcwdu
-        else:
         getcwdu = os.getcwd
         self.failUnless(os.path.exists(os.path.join(getcwdu(), fn)))
         filenames = listdir_unicode(lumiere_nfc)
@@ -237,7 +176,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
         _reload()

     def _check(self, inp, out, enc, optional_quotes, quote_newlines):
-        if PY3 and isinstance(out, bytes):
+        if isinstance(out, bytes):
             out = out.decode(enc or encodingutil.io_encoding)
         out2 = out
         if optional_quotes:
@@ -266,8 +205,6 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
     def _test_quote_output_all(self, enc):
         def check(inp, out, optional_quotes=False, quote_newlines=None):
-            if PY3:
-                # Result is always Unicode on Python 3
             out = out.decode("ascii")
             self._check(inp, out, enc, optional_quotes, quote_newlines)
@@ -354,8 +291,6 @@ def win32_other(win32, other):

 class QuotePaths(ReallyEqualMixin, unittest.TestCase):
     def assertPathsEqual(self, actual, expected):
-        if PY3:
-            # On Python 3, results should be unicode:
         expected = expected.decode("ascii")
         self.failUnlessReallyEqual(actual, expected)

View File

@@ -4,8 +4,6 @@ Tests for allmydata.util.humanreadable.
 This module has been ported to Python 3.
 """

-from past.builtins import long

 from twisted.trial import unittest
 from allmydata.util import humanreadable
@@ -26,7 +24,7 @@ class HumanReadable(unittest.TestCase):
         self.assertRegex(hr(foo), r"<foo\(\) at test_humanreadable.py:\d+>")
         self.failUnlessEqual(hr(self.test_repr),
                              "<bound method HumanReadable.test_repr of <allmydata.test.test_humanreadable.HumanReadable testMethod=test_repr>>")
-        self.failUnlessEqual(hr(long(1)), "1")
+        self.failUnlessEqual(hr(1), "1")
         self.assertIn(hr(10**40),
                       ["100000000000000000...000000000000000000",
                        "100000000000000000...0000000000000000000"])

View File

@@ -6,8 +6,8 @@ import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
 from twisted.python.usage import UsageError
-from six.moves import StringIO
-import mock
+from io import StringIO
+from unittest import mock

 from ..util import i2p_provider
 from ..scripts import create_node, runner

View File

@@ -2,10 +2,6 @@
 Ported to Python 3
 """

-from future.utils import PY2
-from six import ensure_text

 import os.path, re, sys
 from os import linesep
 import locale
@@ -129,17 +125,13 @@ def run_bintahoe(extra_argv, python_options=None):
     :return: A three-tuple of stdout (unicode), stderr (unicode), and the
         child process "returncode" (int).
     """
-    executable = ensure_text(sys.executable)
-    argv = [executable]
+    argv = [sys.executable]
     if python_options is not None:
         argv.extend(python_options)
     argv.extend([u"-b", u"-m", u"allmydata.scripts.runner"])
     argv.extend(extra_argv)
     argv = list(unicode_to_argv(arg) for arg in argv)
     p = Popen(argv, stdout=PIPE, stderr=PIPE)
-    if PY2:
-        encoding = "utf-8"
-    else:
     encoding = locale.getpreferredencoding(False)
     out = p.stdout.read().decode(encoding)
     err = p.stderr.read().decode(encoding)
@@ -154,9 +146,6 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase):
         """
         tricky = u"\u00F6"
         out, err, returncode = run_bintahoe([tricky])
-        if PY2:
-            expected = u"Unknown command: \\xf6"
-        else:
         expected = u"Unknown command: \xf6"
         self.assertEqual(returncode, 1)
         self.assertIn(

View File

@@ -2,8 +2,6 @@
 Tests for allmydata.util.spans.
 """

-from past.builtins import long

 import binascii
 import hashlib
@@ -116,9 +114,6 @@ class ByteSpans(unittest.TestCase):
         s1 = Spans(3, 4) # 3,4,5,6
         self._check1(s1)

-        s1 = Spans(long(3), long(4)) # 3,4,5,6
-        self._check1(s1)

         s2 = Spans(s1)
         self._check1(s2)
@@ -446,9 +441,9 @@ class StringSpans(unittest.TestCase):
         self.failUnlessEqual(ds.get(2, 4), b"fear")

         ds = klass()
-        ds.add(long(2), b"four")
-        ds.add(long(3), b"ea")
-        self.failUnlessEqual(ds.get(long(2), long(4)), b"fear")
+        ds.add(2, b"four")
+        ds.add(3, b"ea")
+        self.failUnlessEqual(ds.get(2, 4), b"fear")

     def do_scan(self, klass):

View File

@@ -4,7 +4,7 @@ Tests for allmydata.util.statistics.
 Ported to Python 3.
 """

-from six.moves import StringIO  # native string StringIO
+from io import StringIO

 from twisted.trial import unittest

View File

@@ -9,7 +9,7 @@ import os.path
 import re
 import json
 from unittest import skipIf
-from six.moves import StringIO
+from io import StringIO

 from twisted.trial import unittest
 from twisted.internet import defer

View File

@@ -2,8 +2,6 @@
 Tests for allmydata.util.time_format.
 """

-from past.builtins import long

 import time

 from twisted.trial import unittest
@@ -103,7 +101,7 @@ class TimeFormat(unittest.TestCase, TimezoneMixin):
     def test_parse_date(self):
         p = time_format.parse_date
         self.failUnlessEqual(p("2010-02-21"), 1266710400)
-        self.failUnless(isinstance(p("2009-03-18"), (int, long)), p("2009-03-18"))
+        self.failUnless(isinstance(p("2009-03-18"), int), p("2009-03-18"))
         self.failUnlessEqual(p("2009-03-18"), 1237334400)

     def test_format_time(self):

View File

@ -5,9 +5,8 @@ Ported to Python 3.
import os import os
from twisted.trial import unittest from twisted.trial import unittest
from twisted.internet import defer, error from twisted.internet import defer, error
from six.moves import StringIO from io import StringIO
from six import ensure_str from unittest import mock
import mock
from ..util import tor_provider from ..util import tor_provider
from ..scripts import create_node, runner from ..scripts import create_node, runner
from foolscap.eventual import flushEventualQueue from foolscap.eventual import flushEventualQueue
@ -185,7 +184,7 @@ class CreateOnion(unittest.TestCase):
txtorcon = mock.Mock() txtorcon = mock.Mock()
ehs = mock.Mock() ehs = mock.Mock()
# This appears to be a native string in the real txtorcon object... # This appears to be a native string in the real txtorcon object...
ehs.private_key = ensure_str("privkey") ehs.private_key = "privkey"
ehs.hostname = "ONION.onion" ehs.hostname = "ONION.onion"
txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs) txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None)) ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))
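
Review note: with the third-party mock backport dropped, the fake txtorcon is built with the stdlib's unittest.mock. A self-contained sketch of that idiom; the names and arguments here are illustrative, not the real txtorcon API.

    from unittest import mock

    fake_service = mock.Mock()
    fake_service.private_key = "privkey"      # a native str, as noted above
    fake_service.hostname = "ONION.onion"

    fake_txtorcon = mock.Mock()
    fake_txtorcon.EphemeralHiddenService = mock.Mock(return_value=fake_service)

    created = fake_txtorcon.EphemeralHiddenService(["80 127.0.0.1:8080"])
    assert created.hostname == "ONION.onion"
    fake_txtorcon.EphemeralHiddenService.assert_called_once_with(["80 127.0.0.1:8080"])
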


@ -2,7 +2,6 @@
Ported to Python3. Ported to Python3.
""" """
import six
import os, time, sys import os, time, sys
import yaml import yaml
import json import json
@ -22,7 +21,6 @@ from allmydata.util.cputhreadpool import defer_to_thread, disable_thread_pool_fo
from allmydata.test.common_util import ReallyEqualMixin from allmydata.test.common_util import ReallyEqualMixin
from .no_network import fireNow, LocalWrapper from .no_network import fireNow, LocalWrapper
if six.PY3:
long = int long = int
@ -477,7 +475,7 @@ class YAML(unittest.TestCase):
Unicode and (ASCII) native strings get roundtripped to Unicode strings. Unicode and (ASCII) native strings get roundtripped to Unicode strings.
""" """
data = yaml.safe_dump( data = yaml.safe_dump(
[six.ensure_str("str"), u"unicode", u"\u1234nicode"] ["str", "unicode", "\u1234nicode"]
) )
back = yamlutil.safe_load(data) back = yamlutil.safe_load(data)
self.assertIsInstance(back[0], str) self.assertIsInstance(back[0], str)
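
Review note: with six.ensure_str gone there is no "native string" case left to special-case, so a plain str round-trips through YAML unchanged. A minimal sketch using PyYAML directly; the test itself goes through Tahoe's yamlutil wrapper.

    import yaml

    data = yaml.safe_dump(["str", u"unicode", u"\u1234nicode"])
    back = yaml.safe_load(data)
    assert back == ["str", "unicode", "\u1234nicode"]
    assert all(isinstance(item, str) for item in back)
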


@ -5,7 +5,7 @@ Ported to Python 3.
import os.path, re import os.path, re
from urllib.parse import quote as url_quote from urllib.parse import quote as url_quote
import json import json
from six.moves import StringIO from io import StringIO
from bs4 import BeautifulSoup from bs4 import BeautifulSoup


@ -7,8 +7,6 @@ Methods ending in to_string() are actually to_bytes(), possibly should be fixed
in follow-up port. in follow-up port.
""" """
from past.builtins import unicode, long
import re import re
from typing import Type from typing import Type
@ -91,7 +89,7 @@ class CHKFileURI(_BaseURI):
def to_string(self): def to_string(self):
assert isinstance(self.needed_shares, int) assert isinstance(self.needed_shares, int)
assert isinstance(self.total_shares, int) assert isinstance(self.total_shares, int)
assert isinstance(self.size, (int,long)) assert isinstance(self.size, int)
return (b'URI:CHK:%s:%s:%d:%d:%d' % return (b'URI:CHK:%s:%s:%d:%d:%d' %
(base32.b2a(self.key), (base32.b2a(self.key),
@ -147,7 +145,7 @@ class CHKFileVerifierURI(_BaseURI):
def to_string(self): def to_string(self):
assert isinstance(self.needed_shares, int) assert isinstance(self.needed_shares, int)
assert isinstance(self.total_shares, int) assert isinstance(self.total_shares, int)
assert isinstance(self.size, (int,long)) assert isinstance(self.size, int)
return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' % return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' %
(si_b2a(self.storage_index), (si_b2a(self.storage_index),
@ -742,7 +740,7 @@ ALLEGED_IMMUTABLE_PREFIX = b'imm.'
def from_string(u, deep_immutable=False, name=u"<unknown name>"): def from_string(u, deep_immutable=False, name=u"<unknown name>"):
"""Create URI from either unicode or byte string.""" """Create URI from either unicode or byte string."""
if isinstance(u, unicode): if isinstance(u, str):
u = u.encode("utf-8") u = u.encode("utf-8")
if not isinstance(u, bytes): if not isinstance(u, bytes):
raise TypeError("URI must be unicode string or bytes: %r" % (u,)) raise TypeError("URI must be unicode string or bytes: %r" % (u,))
@ -844,7 +842,7 @@ def is_uri(s):
return False return False
def is_literal_file_uri(s): def is_literal_file_uri(s):
if isinstance(s, unicode): if isinstance(s, str):
s = s.encode("utf-8") s = s.encode("utf-8")
if not isinstance(s, bytes): if not isinstance(s, bytes):
return False return False
@ -853,7 +851,7 @@ def is_literal_file_uri(s):
s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:')) s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:'))
def has_uri_prefix(s): def has_uri_prefix(s):
if isinstance(s, unicode): if isinstance(s, str):
s = s.encode("utf-8") s = s.encode("utf-8")
if not isinstance(s, bytes): if not isinstance(s, bytes):
return False return False
@ -895,9 +893,9 @@ def pack_extension(data):
pieces = [] pieces = []
for k in sorted(data.keys()): for k in sorted(data.keys()):
value = data[k] value = data[k]
if isinstance(value, (int, long)): if isinstance(value, int):
value = b"%d" % value value = b"%d" % value
if isinstance(k, unicode): if isinstance(k, str):
k = k.encode("utf-8") k = k.encode("utf-8")
assert isinstance(value, bytes), k assert isinstance(value, bytes), k
assert re.match(br'^[a-zA-Z_\-]+$', k) assert re.match(br'^[a-zA-Z_\-]+$', k)
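
Review note: with long gone, the coercions in pack_extension reduce to two plain isinstance checks. A standalone sketch of just that normalization step; normalize_pair is an illustrative helper, not part of uri.py, and the real function goes on to join the pairs into Tahoe's extension encoding.

    import re

    def normalize_pair(k, value):
        """Coerce one extension key/value the way the loop above does."""
        if isinstance(value, int):
            value = b"%d" % value
        if isinstance(k, str):
            k = k.encode("utf-8")
        assert isinstance(value, bytes), k
        assert re.match(br'^[a-zA-Z_\-]+$', k)
        return k, value

    assert normalize_pair(u"size", 1234) == (b"size", b"1234")
    assert normalize_pair(b"codec_name", b"zfec") == (b"codec_name", b"zfec")
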


@ -4,13 +4,6 @@ Base62 encoding.
Ported to Python 3. Ported to Python 3.
""" """
from future.utils import PY2
if PY2:
import string
maketrans = string.maketrans
translate = string.translate
else:
maketrans = bytes.maketrans maketrans = bytes.maketrans
translate = bytes.translate translate = bytes.translate
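
Review note: on Python 3 the module-level aliases above come straight from the bytes type. A tiny sketch of how they behave; the three-letter alphabets are illustrative, not Tahoe's base62 tables.

    maketrans = bytes.maketrans
    translate = bytes.translate

    # Build a 256-entry translation table and apply it to a byte string.
    table = maketrans(b"abc", b"xyz")
    assert translate(b"aabbcc", table) == b"xxyyzz"
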


@ -8,9 +8,6 @@ Once Python 2 support is dropped, most of this module will obsolete, since
Unicode is the default everywhere in Python 3. Unicode is the default everywhere in Python 3.
""" """
from future.utils import PY3, native_str
from future.builtins import str as future_str
from past.builtins import unicode from past.builtins import unicode
from six import ensure_str from six import ensure_str
@ -56,25 +53,13 @@ def check_encoding(encoding):
io_encoding = "utf-8" io_encoding = "utf-8"
filesystem_encoding = None filesystem_encoding = None
is_unicode_platform = False is_unicode_platform = True
use_unicode_filepath = False use_unicode_filepath = True
def _reload(): def _reload():
global filesystem_encoding, is_unicode_platform, use_unicode_filepath global filesystem_encoding
filesystem_encoding = canonical_encoding(sys.getfilesystemencoding()) filesystem_encoding = canonical_encoding(sys.getfilesystemencoding())
check_encoding(filesystem_encoding) check_encoding(filesystem_encoding)
is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"]
# Despite the Unicode-mode FilePath support added to Twisted in
# <https://twistedmatrix.com/trac/ticket/7805>, we can't yet use
# Unicode-mode FilePaths with INotify on non-Windows platforms due to
# <https://twistedmatrix.com/trac/ticket/7928>. Supposedly 7928 is fixed,
# though... and Tahoe-LAFS doesn't use inotify anymore!
#
# In the interest of not breaking anything, this logic is unchanged for
# Python 2, but on Python 3 the paths are always unicode, like it or not.
use_unicode_filepath = PY3 or sys.platform == "win32"
_reload() _reload()
@ -128,9 +113,7 @@ def unicode_to_argv(s):
Windows, this returns the input unmodified. Windows, this returns the input unmodified.
""" """
precondition(isinstance(s, unicode), s) precondition(isinstance(s, unicode), s)
if PY3: warnings.warn("This is unnecessary.", DeprecationWarning)
warnings.warn("This will be unnecessary once Python 2 is dropped.",
DeprecationWarning)
if sys.platform == "win32": if sys.platform == "win32":
return s return s
return ensure_str(s) return ensure_str(s)
@ -138,7 +121,7 @@ def unicode_to_argv(s):
# According to unicode_to_argv above, the expected type for # According to unicode_to_argv above, the expected type for
# cli args depends on the platform, so capture that expectation. # cli args depends on the platform, so capture that expectation.
argv_type = (future_str, native_str) if sys.platform == "win32" else native_str argv_type = (str,)
""" """
The expected type for args to a subprocess The expected type for args to a subprocess
""" """
@ -184,25 +167,9 @@ def unicode_to_output(s):
the responsibility of stdout/stderr, they expect Unicode by default. the responsibility of stdout/stderr, they expect Unicode by default.
""" """
precondition(isinstance(s, unicode), s) precondition(isinstance(s, unicode), s)
if PY3: warnings.warn("This is unnecessary.", DeprecationWarning)
warnings.warn("This will be unnecessary once Python 2 is dropped.",
DeprecationWarning)
return s return s
try:
out = s.encode(io_encoding)
except (UnicodeEncodeError, UnicodeDecodeError):
raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
native_str("A string could not be encoded as %s for output to the terminal:\n%r" %
(io_encoding, repr(s))))
if PRINTABLE_8BIT.search(out) is None:
raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
(io_encoding, repr(s))))
return out
def _unicode_escape(m, quote_newlines): def _unicode_escape(m, quote_newlines):
u = m.group(0) u = m.group(0)
if u == u'"' or u == u'$' or u == u'`' or u == u'\\': if u == u'"' or u == u'$' or u == u'`' or u == u'\\':
@ -303,19 +270,6 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
return b'"%s"' % (escaped.encode(encoding, 'backslashreplace'),) return b'"%s"' % (escaped.encode(encoding, 'backslashreplace'),)
result = _encode(s) result = _encode(s)
if PY3:
# On Python 3 half of what this function does is unnecessary, since
# sys.stdout typically expects Unicode. To ensure no encode errors, one
# can do:
#
# sys.stdout.reconfigure(encoding=sys.stdout.encoding, errors="backslashreplace")
#
# Although the problem is that doesn't work in Python 3.6, only 3.7 or
# later... For now not thinking about it, just returning unicode since
# that is the right thing to do on Python 3.
#
# Now that Python 3.7 is the minimum, this can in theory be done:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3866
result = result.decode(encoding) result = result.decode(encoding)
return result return result
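
Review note: the removed comment block pointed at the Python 3.7+ alternative tracked in ticket 3866. A hedged sketch of that approach, which makes stdout escape anything it cannot encode instead of quote_output pre-encoding for the terminal.

    import sys

    # reconfigure() exists on the usual TextIOWrapper stdout since Python 3.7;
    # guard with hasattr in case stdout has been replaced by something else.
    if hasattr(sys.stdout, "reconfigure"):
        sys.stdout.reconfigure(errors="backslashreplace")

    print("tricky output: \u00f6 \u1234")  # worst case, unencodable characters are escaped
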


@ -4,13 +4,6 @@ Hashing utilities.
Ported to Python 3. Ported to Python 3.
""" """
from future.utils import PY2
if PY2:
# Don't import bytes to prevent leaking future's bytes.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes # noqa: F401
else:
future_bytes = bytes
from past.builtins import chr as byteschr from past.builtins import chr as byteschr
import os import os
@ -246,7 +239,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):
def _xor(a, b): def _xor(a, b):
return b"".join([byteschr(c ^ b) for c in future_bytes(a)]) return b"".join([byteschr(c ^ b) for c in bytes(a)])
def hmac(tag, data): def hmac(tag, data):
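
Review note: _xor still leans on past.builtins.chr (byteschr). An equivalent written with only the built-in bytes type, shown purely as an illustrative follow-up, not as part of this commit.

    def xor_with_byte(data: bytes, b: int) -> bytes:
        """XOR every byte of `data` with the single byte value `b`."""
        # bytes(iterable of ints) replaces the join-over-byteschr idiom above.
        return bytes(c ^ b for c in data)

    assert xor_with_byte(b"\x00\xff\x0f", 0xff) == b"\xff\x00\xf0"
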


@ -104,7 +104,7 @@ def get_local_addresses_sync():
on the local system. on the local system.
""" """
return list( return list(
native_str(address[native_str("addr")]) native_str(address["addr"])
for iface_name for iface_name
in interfaces() in interfaces()
for address for address
@ -161,7 +161,7 @@ def _foolscapEndpointForPortNumber(portnum):
# approach is error prone for the reasons described on # approach is error prone for the reasons described on
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2787 # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2787
portnum = allocate_tcp_port() portnum = allocate_tcp_port()
return (portnum, native_str("tcp:%d" % (portnum,))) return (portnum, "tcp:%d" % portnum)
@implementer(IStreamServerEndpoint) @implementer(IStreamServerEndpoint)
@ -210,7 +210,7 @@ def listenOnUnused(tub, portnum=None):
""" """
portnum, endpoint = _foolscapEndpointForPortNumber(portnum) portnum, endpoint = _foolscapEndpointForPortNumber(portnum)
tub.listenOn(endpoint) tub.listenOn(endpoint)
tub.setLocation(native_str("localhost:%d" % (portnum,))) tub.setLocation("localhost:%d" % portnum)
return portnum return portnum
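
Review note: both hunks above now build plain-str descriptions ("tcp:<port>", "localhost:<port>"). A short sketch of feeding such a description to Twisted's endpoint parser; the port number is illustrative, and the real listenOnUnused hands the same kind of string to the foolscap tub as shown above.

    from twisted.internet import reactor
    from twisted.internet.endpoints import serverFromString

    portnum = 12345                      # illustrative; the real code allocates one
    endpoint_desc = "tcp:%d" % portnum
    location = "localhost:%d" % portnum

    # Twisted parses the same plain-str description used above.
    endpoint = serverFromString(reactor, endpoint_desc)
    print(endpoint, location)
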


@ -4,7 +4,6 @@ Logging utilities.
Ported to Python 3. Ported to Python 3.
""" """
from future.utils import PY2
from six import ensure_str from six import ensure_str
from pyutil import nummedobj from pyutil import nummedobj
@ -12,10 +11,6 @@ from pyutil import nummedobj
from foolscap.logging import log from foolscap.logging import log
from twisted.python import log as tw_log from twisted.python import log as tw_log
if PY2:
def bytes_to_unicode(ign, obj):
return obj
else:
# We want to convert bytes keys to Unicode, otherwise JSON serialization # We want to convert bytes keys to Unicode, otherwise JSON serialization
# inside foolscap will fail (for details see # inside foolscap will fail (for details see
# https://github.com/warner/foolscap/issues/88) # https://github.com/warner/foolscap/issues/88)
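
Review note: the surviving comment explains why bytes keys must become text before foolscap JSON-serializes a log event. A self-contained sketch of that kind of conversion; bytes_to_text is an illustrative helper, and the module's own bytes_to_unicode differs in detail.

    import json

    def bytes_to_text(obj):
        """Recursively decode bytes so the structure is JSON-serializable."""
        if isinstance(obj, bytes):
            return obj.decode("utf-8", errors="replace")
        if isinstance(obj, dict):
            return {bytes_to_text(k): bytes_to_text(v) for k, v in obj.items()}
        if isinstance(obj, (list, tuple)):
            return [bytes_to_text(x) for x in obj]
        return obj

    event = {b"facility": b"tahoe.storage", "size": 123}
    print(json.dumps(bytes_to_text(event)))  # bytes keys no longer break serialization
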


@ -4,8 +4,6 @@ Netstring encoding and decoding.
Ported to Python 3. Ported to Python 3.
""" """
from past.builtins import long
try: try:
from typing import Optional, Tuple, List # noqa: F401 from typing import Optional, Tuple, List # noqa: F401
except ImportError: except ImportError:
@ -27,7 +25,7 @@ def split_netstring(data, numstrings,
data does not exactly equal 'required_trailer'.""" data does not exactly equal 'required_trailer'."""
assert isinstance(data, bytes) assert isinstance(data, bytes)
assert required_trailer is None or isinstance(required_trailer, bytes) assert required_trailer is None or isinstance(required_trailer, bytes)
assert isinstance(position, (int, long)), (repr(position), type(position)) assert isinstance(position, int), (repr(position), type(position))
elements = [] elements = []
assert numstrings >= 0 assert numstrings >= 0
while position < len(data): while position < len(data):
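
Review note: for readers unfamiliar with the framing split_netstring() parses, a tiny standalone encoder makes the "<length>:<bytes>," format concrete; the real module additionally enforces required trailers and element counts.

    def netstring(data: bytes) -> bytes:
        """Frame `data` as a netstring: <decimal length>:<bytes>,"""
        assert isinstance(data, bytes)
        return b"%d:%s," % (len(data), data)

    assert netstring(b"hello") == b"5:hello,"
    assert netstring(b"") == b"0:,"
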


@ -2,8 +2,6 @@
Ported to Python 3. Ported to Python 3.
""" """
from past.builtins import long
import itertools import itertools
import hashlib import hashlib
import re import re
@ -1393,7 +1391,7 @@ class StatusElement(Element):
size = op.get_size() size = op.get_size()
if size is None: if size is None:
size = "(unknown)" size = "(unknown)"
elif isinstance(size, (int, long, float)): elif isinstance(size, (int, float)):
size = abbreviate_size(size) size = abbreviate_size(size)
result["total_size"] = size result["total_size"] = size


@ -1,7 +1,3 @@
from future.utils import PY3
from past.builtins import unicode
# This code isn't loadable or sensible except on Windows. Importers all know # This code isn't loadable or sensible except on Windows. Importers all know
# this and are careful. Normally I would just let an import error from ctypes # this and are careful. Normally I would just let an import error from ctypes
# explain any mistakes but Mypy also needs some help here. This assert # explain any mistakes but Mypy also needs some help here. This assert
@ -15,104 +11,19 @@ from past.builtins import unicode
import sys import sys
assert sys.platform == "win32" assert sys.platform == "win32"
import codecs
from functools import partial
from ctypes import WINFUNCTYPE, windll, POINTER, c_int, WinError, byref, get_last_error
from ctypes.wintypes import BOOL, HANDLE, DWORD, LPWSTR, LPCWSTR, LPVOID
# <https://msdn.microsoft.com/en-us/library/ms680621%28VS.85%29.aspx> # <https://msdn.microsoft.com/en-us/library/ms680621%28VS.85%29.aspx>
from win32api import ( from win32api import (
STD_OUTPUT_HANDLE,
STD_ERROR_HANDLE,
SetErrorMode, SetErrorMode,
# <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
# HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
# returns INVALID_HANDLE_VALUE, NULL, or a valid handle
GetStdHandle,
) )
from win32con import ( from win32con import (
SEM_FAILCRITICALERRORS, SEM_FAILCRITICALERRORS,
SEM_NOOPENFILEERRORBOX, SEM_NOOPENFILEERRORBOX,
) )
from win32file import (
INVALID_HANDLE_VALUE,
FILE_TYPE_CHAR,
# <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
# DWORD WINAPI GetFileType(DWORD hFile);
GetFileType,
)
from allmydata.util import (
log,
)
# Keep track of whether `initialize` has run so we don't do any of the # Keep track of whether `initialize` has run so we don't do any of the
# initialization more than once. # initialization more than once.
_done = False _done = False
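
Review note: the comment above describes the run-once guard that survives this cleanup. A portable sketch of that pattern; the setup callable stands in for the pywin32 SetErrorMode call so the sketch also runs off Windows.

    _done = False

    def initialize(setup=lambda: None):
        """Perform one-time process setup; later calls are no-ops."""
        global _done
        if _done:
            return
        _done = True
        setup()

    initialize()   # runs setup once
    initialize()   # second call returns immediately
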
#
# pywin32 for Python 2.7 does not bind any of these *W variants so we do it
# ourselves.
#
# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms687401%28v=vs.85%29.aspx>
# BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
# LPDWORD lpCharsWritten, LPVOID lpReserved);
WriteConsoleW = WINFUNCTYPE(
BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID,
use_last_error=True
)(("WriteConsoleW", windll.kernel32))
# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms683156%28v=vs.85%29.aspx>
GetCommandLineW = WINFUNCTYPE(
LPWSTR,
use_last_error=True
)(("GetCommandLineW", windll.kernel32))
# <https://msdn.microsoft.com/en-us/library/windows/desktop/bb776391%28v=vs.85%29.aspx>
CommandLineToArgvW = WINFUNCTYPE(
POINTER(LPWSTR), LPCWSTR, POINTER(c_int),
use_last_error=True
)(("CommandLineToArgvW", windll.shell32))
# <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);
GetConsoleMode = WINFUNCTYPE(
BOOL, HANDLE, POINTER(DWORD),
use_last_error=True
)(("GetConsoleMode", windll.kernel32))
STDOUT_FILENO = 1
STDERR_FILENO = 2
def get_argv():
"""
:return [unicode]: The argument list this process was invoked with, as
unicode.
Python 2 does not do a good job exposing this information in
``sys.argv`` on Windows so this code re-retrieves the underlying
information using Windows API calls and massages it into the right
shape.
"""
command_line = GetCommandLineW()
argc = c_int(0)
argv_unicode = CommandLineToArgvW(command_line, byref(argc))
if argv_unicode is None:
raise WinError(get_last_error())
# Convert it to a normal Python list
return list(
argv_unicode[i]
for i
in range(argc.value)
)
def initialize(): def initialize():
global _done global _done
@ -122,188 +33,3 @@ def initialize():
_done = True _done = True
SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX) SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX)
if PY3:
# The rest of this appears to be Python 2-specific
return
original_stderr = sys.stderr
# If any exception occurs in this code, we'll probably try to print it on stderr,
# which makes for frustrating debugging if stderr is directed to our wrapper.
# So be paranoid about catching errors and reporting them to original_stderr,
# so that we can at least see them.
def _complain(output_file, message):
print(isinstance(message, str) and message or repr(message), file=output_file)
log.msg(message, level=log.WEIRD)
_complain = partial(_complain, original_stderr)
# Work around <http://bugs.python.org/issue6058>.
codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
# Make Unicode console output work independently of the current code page.
# This also fixes <http://bugs.python.org/issue1602>.
# Credit to Michael Kaplan <https://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
# and TZOmegaTZIOY
# <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
try:
old_stdout_fileno = None
old_stderr_fileno = None
if hasattr(sys.stdout, 'fileno'):
old_stdout_fileno = sys.stdout.fileno()
if hasattr(sys.stderr, 'fileno'):
old_stderr_fileno = sys.stderr.fileno()
real_stdout = (old_stdout_fileno == STDOUT_FILENO)
real_stderr = (old_stderr_fileno == STDERR_FILENO)
if real_stdout:
hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
if not a_console(hStdout):
real_stdout = False
if real_stderr:
hStderr = GetStdHandle(STD_ERROR_HANDLE)
if not a_console(hStderr):
real_stderr = False
if real_stdout:
sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '<Unicode console stdout>', _complain)
else:
sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '<Unicode redirected stdout>', _complain)
if real_stderr:
sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>', _complain)
else:
sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>', _complain)
except Exception as e:
_complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))
argv = list(arg.encode("utf-8") for arg in get_argv())
# Take only the suffix with the same number of arguments as sys.argv.
# This accounts for anything that can cause initial arguments to be stripped,
# for example, the Python interpreter or any options passed to it, or runner
# scripts such as 'coverage run'. It works even if there are no such arguments,
# as in the case of a frozen executable created by bb-freeze or similar.
#
# Also, modify sys.argv in place. If any code has already taken a
# reference to the original argument list object then this ensures that
# code sees the new values. This reliance on mutation of shared state is,
# of course, awful. Why does this function even modify sys.argv? Why not
# have a function that *returns* the properly initialized argv as a new
# list? I don't know.
#
# At least Python 3 gets sys.argv correct so before very much longer we
# should be able to fix this bad design by deleting it.
sys.argv[:] = argv[-len(sys.argv):]
def a_console(handle):
"""
:return: ``True`` if ``handle`` refers to a console, ``False`` otherwise.
"""
if handle == INVALID_HANDLE_VALUE:
return False
return (
# It's a character file (eg a printer or a console)
GetFileType(handle) == FILE_TYPE_CHAR and
# Checking the console mode doesn't fail (thus it's a console)
GetConsoleMode(handle, byref(DWORD())) != 0
)
class UnicodeOutput(object):
"""
``UnicodeOutput`` is a file-like object that encodes unicode to UTF-8 and
writes it to another file or writes unicode natively to the Windows
console.
"""
def __init__(self, hConsole, stream, fileno, name, _complain):
"""
:param hConsole: ``None`` or a handle on the console to which to write
unicode. Mutually exclusive with ``stream``.
:param stream: ``None`` or a file-like object to which to write bytes.
:param fileno: A result to hand back from method of the same name.
:param name: A human-friendly identifier for this output object.
:param _complain: A one-argument callable which accepts bytes to be
written when there's a problem. Care should be taken to not make
this do a write on this object.
"""
self._hConsole = hConsole
self._stream = stream
self._fileno = fileno
self.closed = False
self.softspace = False
self.mode = 'w'
self.encoding = 'utf-8'
self.name = name
self._complain = _complain
from allmydata.util.encodingutil import canonical_encoding
from allmydata.util import log
if hasattr(stream, 'encoding') and canonical_encoding(stream.encoding) != 'utf-8':
log.msg("%s: %r had encoding %r, but we're going to write UTF-8 to it" %
(name, stream, stream.encoding), level=log.CURIOUS)
self.flush()
def isatty(self):
return False
def close(self):
# don't really close the handle, that would only cause problems
self.closed = True
def fileno(self):
return self._fileno
def flush(self):
if self._hConsole is None:
try:
self._stream.flush()
except Exception as e:
self._complain("%s.flush: %r from %r" % (self.name, e, self._stream))
raise
def write(self, text):
try:
if self._hConsole is None:
# There is no Windows console available. That means we are
# responsible for encoding the unicode to a byte string to
# write it to a Python file object.
if isinstance(text, unicode):
text = text.encode('utf-8')
self._stream.write(text)
else:
# There is a Windows console available. That means Windows is
# responsible for dealing with the unicode itself.
if not isinstance(text, unicode):
text = str(text).decode('utf-8')
remaining = len(text)
while remaining > 0:
n = DWORD(0)
# There is a shorter-than-documented limitation on the
# length of the string passed to WriteConsoleW (see
# #1232).
retval = WriteConsoleW(self._hConsole, text, min(remaining, 10000), byref(n), None)
if retval == 0:
raise IOError("WriteConsoleW failed with WinError: %s" % (WinError(get_last_error()),))
if n.value == 0:
raise IOError("WriteConsoleW returned %r, n.value = 0" % (retval,))
remaining -= n.value
if remaining == 0: break
text = text[n.value:]
except Exception as e:
self._complain("%s.write: %r" % (self.name, e))
raise
def writelines(self, lines):
try:
for line in lines:
self.write(line)
except Exception as e:
self._complain("%s.writelines: %r" % (self.name, e))
raise
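
Review note: the removed UnicodeOutput.write() loop worked around a shorter-than-documented WriteConsoleW limit (the #1232 note above) by writing at most 10000 characters per call. The chunking idea, reduced to a portable helper purely for illustration.

    def write_in_chunks(write, text, limit=10000):
        """Feed `text` to `write` in pieces of at most `limit` characters."""
        remaining = text
        while remaining:
            n = min(len(remaining), limit)
            write(remaining[:n])
            remaining = remaining[n:]

    chunks = []
    write_in_chunks(chunks.append, "x" * 25000)
    assert [len(c) for c in chunks] == [10000, 10000, 5000]
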