remove more Python2 compatibility

Alexandre Detiste 2024-02-28 00:49:07 +01:00
parent 3fb0bcfff7
commit 53084f76ce
40 changed files with 96 additions and 371 deletions
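For orientation (this summary is editorial, not text from the commit): the hunks below mostly swap six/future compatibility shims for the Python 3 built-ins they stood in for. A rough sketch of the recurring replacements:

# Python 3 forms used throughout this commit, shown next to the shims they replace.
from io import StringIO            # was: from six.moves import StringIO / cStringIO
from unittest import mock          # was: import mock
isinstance(b"x", bytes)            # was: isinstance(..., six.binary_type / future binary_type)
isinstance(u"x", str)              # was: isinstance(..., six.text_type / past.builtins.unicode)
maketrans = bytes.maketrans        # was: string.maketrans on Python 2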

View File

@@ -24,7 +24,7 @@
 import os, sys, subprocess, json, tempfile, zipfile, re, itertools
 import email.parser
 from pprint import pprint
-from six.moves import StringIO
+from io import StringIO
 import click
 all_packages = {} # name -> version

View File

@@ -4,13 +4,5 @@ Monkey-patching of third party libraries.
 Ported to Python 3.
 """
-from future.utils import PY2
 def patch():
     """Path third-party libraries to make Tahoe-LAFS work."""
-    if not PY2:
-        # Python 3 doesn't need to monkey patch Foolscap
-        return

View File

@@ -10,8 +10,6 @@ objects that `cryptography` documents.
 Ported to Python 3.
 """
-import six
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.ciphers import (
     Cipher,
@@ -79,7 +77,7 @@ def encrypt_data(encryptor, plaintext):
     """
     _validate_cryptor(encryptor, encrypt=True)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')
     return encryptor.update(plaintext)
@@ -118,7 +116,7 @@ def decrypt_data(decryptor, plaintext):
     """
     _validate_cryptor(decryptor, encrypt=False)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')
     return decryptor.update(plaintext)
@@ -160,7 +158,7 @@ def _validate_key(key):
     """
     confirm `key` is suitable for AES encryption, or raise ValueError
     """
-    if not isinstance(key, six.binary_type):
+    if not isinstance(key, bytes):
         raise TypeError('Key must be bytes')
     if len(key) not in (16, 32):
         raise ValueError('Key must be 16 or 32 bytes long')
@@ -175,7 +173,7 @@ def _validate_iv(iv):
     """
     if iv is None:
         return DEFAULT_IV
-    if not isinstance(iv, six.binary_type):
+    if not isinstance(iv, bytes):
         raise TypeError('IV must be bytes')
     if len(iv) != 16:
         raise ValueError('IV must be 16 bytes long')

View File

@@ -45,9 +45,6 @@ noisy = True
 from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \
      msg as logmsg, PrefixingLogMixin
-if six.PY3:
-    long = int
 def createSFTPError(errorCode, errorMessage):
     """

View File

@@ -112,8 +112,8 @@ def formatTimeTahoeStyle(self, when):
     """
    d = datetime.datetime.utcfromtimestamp(when)
    if d.microsecond:
-        return d.isoformat(ensure_str(" "))[:-3]+"Z"
-    return d.isoformat(ensure_str(" ")) + ".000Z"
+        return d.isoformat(" ")[:-3]+"Z"
+    return d.isoformat(" ") + ".000Z"
 PRIV_README = """
 This directory contains files which contain private data for the Tahoe node,

View File

@@ -3,7 +3,6 @@ Ported to Python 3.
 """
 import sys
-import six
 from allmydata.util.assertutil import precondition
 from allmydata.util.fileutil import abspath_expanduser_unicode
@@ -13,10 +12,10 @@ if sys.platform == 'win32':
     from allmydata.windows import registry
     path = registry.get_base_dir_path()
     if path:
-        precondition(isinstance(path, six.text_type), path)
+        precondition(isinstance(path, str), path)
         _default_nodedir = abspath_expanduser_unicode(path)
 if _default_nodedir is None:
-    path = abspath_expanduser_unicode(u"~/.tahoe")
-    precondition(isinstance(path, six.text_type), path)
+    path = abspath_expanduser_unicode("~/.tahoe")
+    precondition(isinstance(path, str), path)
     _default_nodedir = path

View File

@@ -65,8 +65,8 @@ class Options(usage.Options):
     ]
     optParameters = [
         ["node-directory", "d", None, NODEDIR_HELP],
-        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type],
-        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type],
+        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", str],
+        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", str],
     ]
     def opt_version(self):
@@ -262,7 +262,7 @@ def _setup_coverage(reactor, argv):
     # can we put this _setup_coverage call after we hit
     # argument-parsing?
-    # ensure_str() only necessary on Python 2.
-    if six.ensure_str('--coverage') not in sys.argv:
+    if '--coverage' not in sys.argv:
         return
     argv.remove('--coverage')

View File

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
-from future.utils import PY3
 from six import ensure_str
 import os, time
@@ -81,8 +79,6 @@ class SlowOperationRunner(object):
         if not data["finished"]:
             return False
         if self.options.get("raw"):
-            if PY3:
-                # need to write bytes!
             stdout = stdout.buffer
             if is_printable_ascii(jdata):
                 stdout.write(jdata)

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
-from six import ensure_str, ensure_text
+from six import ensure_text
 from urllib.parse import quote as url_quote
 import json
@@ -168,7 +168,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
         # LIT files and directories do not have a "summary" field.
         summary = cr.get("summary", "Healthy (LIT)")
-        # When Python 2 is dropped the ensure_text()/ensure_str() will be unnecessary.
-        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), quote_output(summary, quotemarks=False)),
+        print(ensure_text("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)),
                           encoding=get_io_encoding()), file=stdout)
     # always print out corrupt shares
@@ -246,13 +246,11 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
         if not path:
             path = ["<root>"]
         # we don't seem to have a summary available, so build one
-        # When Python 2 is dropped the ensure_text/ensure_str crap can be
-        # dropped.
         if was_healthy:
-            summary = ensure_str("healthy")
+            summary = "healthy"
         else:
-            summary = ensure_str("not healthy")
+            summary = "not healthy"
-        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), summary),
+        print(ensure_text("%s: %s" % (quote_path(path), summary),
                           encoding=get_io_encoding()), file=stdout)
     # always print out corrupt shares

View File

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
-from future.utils import PY3
 from urllib.parse import quote as url_quote
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
      UnknownAliasError
@@ -36,7 +34,7 @@ def get(options):
         outf = stdout
         # Make sure we can write bytes; on Python 3 stdout is Unicode by
         # default.
-        if PY3 and getattr(outf, "encoding", None) is not None:
+        if getattr(outf, "encoding", None) is not None:
             outf = outf.buffer
         while True:
             data = resp.read(4096)
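The pattern kept above (and reused in several other hunks) is the standard Python 3 idiom for emitting raw bytes on a text-mode stream: a text-mode file object exposes its underlying binary stream as .buffer. A minimal, hypothetical sketch of the same idea outside Tahoe:

import sys

def write_payload(payload: bytes) -> None:
    # sys.stdout is text-mode on Python 3; its .buffer attribute is the
    # underlying binary stream, which accepts bytes directly.
    out = sys.stdout
    if getattr(out, "buffer", None) is not None:
        out = out.buffer
    out.write(payload)

write_payload(b"raw bytes, no encoding step\n")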

View File

@@ -2,10 +2,6 @@
 Ported to Python 3.
 """
-from future.utils import PY3
-from six import ensure_str
 from urllib.parse import quote as url_quote
 import json
 from twisted.protocols.basic import LineOnlyReceiver
@@ -56,7 +52,6 @@ class ManifestStreamer(LineOnlyReceiver, object):
         # use Twisted to split this into lines
         self.in_error = False
         # Writing bytes, so need binary stdout.
-        if PY3:
         stdout = stdout.buffer
         while True:
             chunk = resp.read(100)
@@ -99,8 +94,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
             if vc:
                 print(quote_output(vc, quotemarks=False), file=stdout)
             else:
-                # ensure_str() only necessary for Python 2.
-                print(ensure_str("%s %s") % (
+                print("%s %s" % (
                     quote_output(d["cap"], quotemarks=False),
                     quote_path(d["path"], quotemarks=False)), file=stdout)

View File

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
-from future.utils import PY3
 import os.path
 from allmydata.util import base32
@@ -43,7 +41,5 @@ def storage_index_to_dir(storageindex):
     Returns native string.
     """
     sia = si_b2a(storageindex)
-    if PY3:
-        # On Python 3 we expect paths to be unicode.
     sia = sia.decode("ascii")
     return os.path.join(sia[:2], sia)

View File

@@ -4,9 +4,6 @@ Crawl the storage server shares.
 Ported to Python 3.
 """
-from future.utils import PY2, PY3
 import os
 import time
 import json
@@ -150,9 +147,6 @@ def _dump_json_to_file(js, afile):
     """
     with afile.open("wb") as f:
         data = json.dumps(js)
-        if PY2:
-            f.write(data)
-        else:
         f.write(data.encode("utf8"))
@@ -249,8 +243,6 @@ class ShareCrawler(service.MultiService):
         self._state_serializer = _LeaseStateSerializer(statefile)
         self.prefixes = [si_b2a(struct.pack(">H", i << (16-10)))[:2]
                          for i in range(2**10)]
-        if PY3:
-            # On Python 3 we expect the paths to be unicode, not bytes.
         self.prefixes = [p.decode("ascii") for p in self.prefixes]
         self.prefixes.sort()
         self.timer = None

View File

@@ -2,10 +2,8 @@
 Ported to Python 3.
 """
-from future.utils import PY2
 import os.path
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from datetime import timedelta
 import re
@@ -421,9 +419,6 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
             else:
                 return original_open(name, *args, **kwargs)
-        if PY2:
-            from allmydata.scripts import cli as module_to_patch
-        else:
         import builtins as module_to_patch
         patcher = MonkeyPatcher((module_to_patch, 'open', call_file))
         patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to'])

View File

@@ -4,7 +4,7 @@ Ported to Python 3.
 import sys
 import os.path, time
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.trial import unittest
 from allmydata.util import fileutil
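One subtlety behind these import swaps: on Python 2, six.moves.cStringIO resolved to cStringIO.StringIO, which handled native (byte) strings, whereas io.StringIO is strictly text and io.BytesIO is strictly bytes. The replacements here therefore assume the buffers only ever carry text. A small illustrative sketch:

from io import StringIO, BytesIO

text_buf = StringIO()
text_buf.write("only str is accepted here\n")      # writing bytes would raise TypeError

byte_buf = BytesIO()
byte_buf.write(b"only bytes are accepted here\n")  # writing str would raise TypeError

print(text_buf.getvalue(), byte_buf.getvalue())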

View File

@@ -3,7 +3,7 @@ from six import ensure_text
 import os.path
 import json
 from twisted.trial import unittest
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from allmydata import uri
 from allmydata.util import base32

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
-from six.moves import cStringIO as StringIO
+from io import StringIO
 import re
 from six import ensure_text

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
-from six.moves import StringIO
+from io import StringIO
 import os.path
 from twisted.trial import unittest
 from urllib.parse import quote as url_quote

View File

@@ -2,9 +2,6 @@
 Ported to Python 3.
 """
-from future.utils import PY3
-from six import ensure_str
 from twisted.trial import unittest
 from twisted.internet import defer
@@ -26,10 +23,6 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         good_arg = u"g\u00F6\u00F6d"
         good_out = u"g\u00F6\u00F6d"
-        # On Python 2 we get bytes, so we need encoded version. On Python 3
-        # stdio is unicode so can leave unchanged.
-        good_out_encoded = good_out if PY3 else good_out.encode(get_io_encoding())
         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n
@@ -52,7 +45,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 0)
             self.assertEqual(len(err), 0, err)
-            expected = sorted([ensure_str("0share"), ensure_str("1share"), good_out_encoded])
+            expected = sorted(["0share", "1share", good_out])
             self.assertEqual(sorted(out.splitlines()), expected)
         d.addCallback(_check1)
         d.addCallback(lambda ign: self.do_cli("ls", "missing"))
@@ -85,8 +78,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             # listing a file (as dir/filename) should have the edge metadata,
             # including the filename
             self.failUnlessReallyEqual(rc, 0)
-            self.failUnlessIn(good_out_encoded, out)
-            self.failIfIn(ensure_str("-r-- %d -" % len(small)), out,
+            self.failUnlessIn(good_out, out)
+            self.failIfIn("-r-- %d -" % len(small), out,
                           "trailing hyphen means unknown date")
             if good_arg is not None:

View File

@@ -5,7 +5,7 @@ Tests for ``allmydata.scripts.tahoe_run``.
 from __future__ import annotations
 import re
-from six.moves import (
+from io import (
     StringIO,
 )

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
-from future.utils import PY2, PY3, bchr, binary_type
+from future.utils import bchr
 from future.builtins import str as future_str
 import os
@@ -13,8 +13,6 @@ from functools import (
     partial,
 )
 from random import randrange
-if PY2:
-    from StringIO import StringIO
 from io import (
     TextIOWrapper,
     BytesIO,
@@ -101,22 +99,7 @@ def run_cli_native(verb, *args, **kwargs):
     )
     argv = ["tahoe"] + nodeargs + [verb] + list(args)
     stdin = kwargs.get("stdin", "")
-    if PY2:
-        # The original behavior, the Python 2 behavior, is to accept either
-        # bytes or unicode and try to automatically encode or decode as
-        # necessary. This works okay for ASCII and if LANG is set
-        # appropriately. These aren't great constraints so we should move
-        # away from this behavior.
-        #
-        # The encoding attribute doesn't change StringIO behavior on Python 2,
-        # but it's there for realism of the emulation.
-        stdin = StringIO(stdin)
-        stdin.encoding = encoding
-        stdout = StringIO()
-        stdout.encoding = encoding
-        stderr = StringIO()
-        stderr.encoding = encoding
-    else:
+    if True:
         # The new behavior, the Python 3 behavior, is to accept unicode and
         # encode it using a specific encoding. For older versions of Python 3,
         # the encoding is determined from LANG (bad) but for newer Python 3,
@@ -146,13 +129,13 @@ def run_cli_native(verb, *args, **kwargs):
         stderr=stderr,
     )
     def _done(rc, stdout=stdout, stderr=stderr):
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return 0, _getvalue(stdout), _getvalue(stderr)
     def _err(f, stdout=stdout, stderr=stderr):
         f.trap(SystemExit)
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return f.value.code, _getvalue(stdout), _getvalue(stderr)
@@ -189,10 +172,6 @@ def run_cli_unicode(verb, argv, nodeargs=None, stdin=None, encoding=None):
         argv=argv,
     )
     codec = encoding or "ascii"
-    if PY2:
-        encode = lambda t: None if t is None else t.encode(codec)
-    else:
-        # On Python 3 command-line parsing expects Unicode!
     encode = lambda t: t
     d = run_cli_native(
         encode(verb),
@@ -238,7 +217,7 @@ def flip_bit(good, which):
 def flip_one_bit(s, offset=0, size=None):
     """ flip one random bit of the string s, in a byte greater than or equal to offset and less
     than offset+size. """
-    precondition(isinstance(s, binary_type))
+    precondition(isinstance(s, bytes))
     if size is None:
         size=len(s)-offset
     i = randrange(offset, offset+size)
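The surviving branch in run_cli_native wraps byte buffers in text-mode objects with an explicit encoding (per the comments kept above, and the TextIOWrapper/BytesIO imports earlier in the same file). A minimal illustration of that pattern, independent of the Tahoe helpers:

from io import BytesIO, TextIOWrapper

# Text-mode view over an in-memory byte buffer, with a fixed encoding.
raw = BytesIO()
stream = TextIOWrapper(raw, encoding="utf-8", newline="")
stream.write("h\u00e9llo\n")
stream.flush()
assert raw.getvalue() == b"h\xc3\xa9llo\n"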

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.internet import defer, reactor
 from ..common import AsyncBrokenTestCase
 from testtools.matchers import (

View File

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from ..common import AsyncTestCase
 from testtools.matchers import Equals, HasLength, Contains
 from twisted.internet import defer

View File

@@ -2,8 +2,6 @@
 This module has been ported to Python 3.
 """
-from future.utils import PY2
 import sys
 import random
@@ -31,7 +29,7 @@ class TestFlipOneBit(SyncTestCase):
     def test_accepts_byte_string(self):
         actual = flip_one_bit(b'foo')
-        self.assertEqual(actual, b'fno' if PY2 else b'fom')
+        self.assertEqual(actual, b'fom')
     def test_rejects_unicode_string(self):
         self.assertRaises(AssertionError, flip_one_bit, u'foo')

View File

@@ -5,8 +5,6 @@ Ported to Python 3.
 """
-from future.utils import PY3
 import time
 import os.path
 from twisted.trial import unittest
@@ -28,7 +26,6 @@ class BucketEnumeratingCrawler(ShareCrawler):
         self.all_buckets = []
         self.finished_d = defer.Deferred()
     def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
-        if PY3:
         # Bucket _inputs_ are bytes, and that's what we will compare this
         # to:
         storage_index_b32 = storage_index_b32.encode("ascii")
@@ -46,7 +43,6 @@ class PacedCrawler(ShareCrawler):
         self.finished_d = defer.Deferred()
         self.yield_cb = None
     def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
-        if PY3:
         # Bucket _inputs_ are bytes, and that's what we will compare this
         # to:
         storage_index_b32 = storage_index_b32.encode("ascii")

View File

@@ -1,6 +1,3 @@
-from future.utils import native_bytes
 import unittest
 from base64 import b64decode
@@ -40,7 +37,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_2048_PRIV_KEY = b64decode(f.read().strip())
-        assert isinstance(RSA_2048_PRIV_KEY, native_bytes)
+        assert isinstance(RSA_2048_PRIV_KEY, bytes)
     with RESOURCE_DIR.child('pycryptopp-rsa-2048-sig.txt').open('r') as f:
         # Signature created using `RSA_2048_PRIV_KEY` via:
@@ -61,7 +58,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_TINY_PRIV_KEY = b64decode(f.read().strip())
-        assert isinstance(RSA_TINY_PRIV_KEY, native_bytes)
+        assert isinstance(RSA_TINY_PRIV_KEY, bytes)
     with RESOURCE_DIR.child('pycryptopp-rsa-32768-priv.txt').open('r') as f:
         # Created using `pycryptopp`:
@@ -72,7 +69,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_HUGE_PRIV_KEY = b64decode(f.read().strip())
-        assert isinstance(RSA_HUGE_PRIV_KEY, native_bytes)
+        assert isinstance(RSA_HUGE_PRIV_KEY, bytes)
     def test_old_start_up_test(self):
         """
@@ -324,7 +321,7 @@ class TestEd25519(unittest.TestCase):
         private_key, public_key = ed25519.create_signing_keypair()
         private_key_str = ed25519.string_from_signing_key(private_key)
-        self.assertIsInstance(private_key_str, native_bytes)
+        self.assertIsInstance(private_key_str, bytes)
         private_key2, public_key2 = ed25519.signing_keypair_from_string(private_key_str)
@@ -340,7 +337,7 @@ class TestEd25519(unittest.TestCase):
         # ditto, but for the verifying keys
         public_key_str = ed25519.string_from_verifying_key(public_key)
-        self.assertIsInstance(public_key_str, native_bytes)
+        self.assertIsInstance(public_key_str, bytes)
         public_key2 = ed25519.verifying_key_from_string(public_key_str)
         self.assertEqual(
@@ -444,7 +441,7 @@ class TestRsa(unittest.TestCase):
         priv_key, pub_key = rsa.create_signing_keypair(2048)
         priv_key_str = rsa.der_string_from_signing_key(priv_key)
-        self.assertIsInstance(priv_key_str, native_bytes)
+        self.assertIsInstance(priv_key_str, bytes)
         priv_key2, pub_key2 = rsa.create_signing_keypair_from_string(priv_key_str)

View File

@@ -10,7 +10,6 @@ from future.utils import bchr
 from typing import Any
-import six
 import os
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
@@ -30,9 +29,6 @@ from allmydata.immutable.downloader.fetcher import SegmentFetcher
 from allmydata.codec import CRSDecoder
 from foolscap.eventual import eventually, fireEventually, flushEventualQueue
-if six.PY3:
-    long = int
 plaintext = b"This is a moderate-sized file.\n" * 10
 mutable_plaintext = b"This is a moderate-sized mutable file.\n" * 10

View File

@@ -1,8 +1,4 @@
-from future.utils import PY2, PY3
-from past.builtins import unicode
 lumiere_nfc = u"lumi\u00E8re"
 Artonwall_nfc = u"\u00C4rtonwall.mp3"
 Artonwall_nfd = u"A\u0308rtonwall.mp3"
@@ -46,12 +42,6 @@ if __name__ == "__main__":
     for fname in TEST_FILENAMES:
         open(os.path.join(tmpdir, fname), 'w').close()
-    # On Python 2, listing directories returns unicode under Windows or
-    # MacOS X if the input is unicode. On Python 3, it always returns
-    # Unicode.
-    if PY2 and sys.platform in ('win32', 'darwin'):
-        dirlist = os.listdir(unicode(tmpdir))
-    else:
     dirlist = os.listdir(tmpdir)
     print(" dirlist = %s" % repr(dirlist))
@@ -64,7 +54,6 @@
 import os, sys
-from unittest import skipIf
 from twisted.trial import unittest
@@ -87,15 +76,6 @@ class MockStdout(object):
 # The following tests apply only to platforms that don't store filenames as
 # Unicode entities on the filesystem.
 class EncodingUtilNonUnicodePlatform(unittest.TestCase):
-    @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
-    def setUp(self):
-        # Make sure everything goes back to the way it was at the end of the
-        # test.
-        self.addCleanup(_reload)
-        # Mock sys.platform because unicode_platform() uses it. Cleanups run
-        # in reverse order so we do this second so it gets undone first.
-        self.patch(sys, "platform", "linux")
     def test_listdir_unicode(self):
         # What happens if latin1-encoded filenames are encountered on an UTF-8
@@ -143,10 +123,7 @@ class EncodingUtil(ReallyEqualMixin):
         converts to bytes using UTF-8 elsewhere.
         """
         result = unicode_to_argv(lumiere_nfc)
-        if PY3 or self.platform == "win32":
-            expected_value = lumiere_nfc
-        else:
-            expected_value = lumiere_nfc.encode(self.io_encoding)
+        expected_value = lumiere_nfc
         self.assertIsInstance(result, type(expected_value))
         self.assertEqual(result, expected_value)
@@ -167,9 +144,6 @@
                           % (self.filesystem_encoding,))
         def call_os_listdir(path):
-            if PY2:
-                return self.dirlist
-            else:
             # Python 3 always lists unicode filenames:
             return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
                     else d
@@ -204,9 +178,6 @@ class StdlibUnicode(unittest.TestCase):
         fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
         open(fn, 'wb').close()
         self.failUnless(os.path.exists(fn))
-        if PY2:
-            getcwdu = os.getcwdu
-        else:
         getcwdu = os.getcwd
         self.failUnless(os.path.exists(os.path.join(getcwdu(), fn)))
         filenames = listdir_unicode(lumiere_nfc)
@@ -237,7 +208,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
         _reload()
     def _check(self, inp, out, enc, optional_quotes, quote_newlines):
-        if PY3 and isinstance(out, bytes):
+        if isinstance(out, bytes):
             out = out.decode(enc or encodingutil.io_encoding)
         out2 = out
         if optional_quotes:
@@ -266,8 +237,6 @@
     def _test_quote_output_all(self, enc):
         def check(inp, out, optional_quotes=False, quote_newlines=None):
-            if PY3:
-                # Result is always Unicode on Python 3
             out = out.decode("ascii")
             self._check(inp, out, enc, optional_quotes, quote_newlines)
@@ -354,8 +323,6 @@ def win32_other(win32, other):
 class QuotePaths(ReallyEqualMixin, unittest.TestCase):
     def assertPathsEqual(self, actual, expected):
-        if PY3:
-            # On Python 3, results should be unicode:
         expected = expected.decode("ascii")
         self.failUnlessReallyEqual(actual, expected)

View File

@@ -6,8 +6,8 @@ import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
 from twisted.python.usage import UsageError
-from six.moves import StringIO
-import mock
+from io import StringIO
+from unittest import mock
 from ..util import i2p_provider
 from ..scripts import create_node, runner
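The import mock -> from unittest import mock change works because the third-party mock library was absorbed into the standard library as unittest.mock in Python 3.3, with the same API. A hypothetical usage sketch (names here are illustrative, not from the tests):

from unittest import mock

def fetch_greeting(client):
    return client.get("/greeting")

# Mock objects record calls and can return canned values.
fake_client = mock.Mock()
fake_client.get.return_value = "hello"
assert fetch_greeting(fake_client) == "hello"
fake_client.get.assert_called_once_with("/greeting")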

View File

@@ -2,10 +2,6 @@
 Ported to Python 3
 """
-from future.utils import PY2
-from six import ensure_text
 import os.path, re, sys
 from os import linesep
 import locale
@@ -129,17 +125,13 @@ def run_bintahoe(extra_argv, python_options=None):
     :return: A three-tuple of stdout (unicode), stderr (unicode), and the
         child process "returncode" (int).
     """
-    executable = ensure_text(sys.executable)
-    argv = [executable]
+    argv = [sys.executable]
     if python_options is not None:
         argv.extend(python_options)
     argv.extend([u"-b", u"-m", u"allmydata.scripts.runner"])
     argv.extend(extra_argv)
     argv = list(unicode_to_argv(arg) for arg in argv)
     p = Popen(argv, stdout=PIPE, stderr=PIPE)
-    if PY2:
-        encoding = "utf-8"
-    else:
     encoding = locale.getpreferredencoding(False)
     out = p.stdout.read().decode(encoding)
     err = p.stderr.read().decode(encoding)
@@ -154,9 +146,6 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase):
         """
         tricky = u"\u00F6"
         out, err, returncode = run_bintahoe([tricky])
-        if PY2:
-            expected = u"Unknown command: \\xf6"
-        else:
         expected = u"Unknown command: \xf6"
         self.assertEqual(returncode, 1)
         self.assertIn(

View File

@@ -4,7 +4,7 @@ Tests for allmydata.util.statistics.
 Ported to Python 3.
 """
-from six.moves import StringIO # native string StringIO
+from io import StringIO
 from twisted.trial import unittest

View File

@@ -9,7 +9,7 @@ import os.path
 import re
 import json
 from unittest import skipIf
-from six.moves import StringIO
+from io import StringIO
 from twisted.trial import unittest
 from twisted.internet import defer

View File

@@ -5,9 +5,8 @@ Ported to Python 3.
 import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
-from six.moves import StringIO
-from six import ensure_str
-import mock
+from io import StringIO
+from unittest import mock
 from ..util import tor_provider
 from ..scripts import create_node, runner
 from foolscap.eventual import flushEventualQueue
@@ -185,7 +184,7 @@ class CreateOnion(unittest.TestCase):
         txtorcon = mock.Mock()
         ehs = mock.Mock()
         # This appears to be a native string in the real txtorcon object...
-        ehs.private_key = ensure_str("privkey")
+        ehs.private_key = "privkey"
         ehs.hostname = "ONION.onion"
         txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
         ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))

View File

@@ -2,7 +2,6 @@
 Ported to Python3.
 """
-import six
 import os, time, sys
 import yaml
 import json
@@ -22,8 +21,7 @@ from allmydata.util.cputhreadpool import defer_to_thread, disable_thread_pool_fo
 from allmydata.test.common_util import ReallyEqualMixin
 from .no_network import fireNow, LocalWrapper
-if six.PY3:
-    long = int
+long = int
 class IDLib(unittest.TestCase):
@@ -477,7 +475,7 @@ class YAML(unittest.TestCase):
         Unicode and (ASCII) native strings get roundtripped to Unicode strings.
         """
         data = yaml.safe_dump(
-            [six.ensure_str("str"), u"unicode", u"\u1234nicode"]
+            ["str", "unicode", "\u1234nicode"]
         )
         back = yamlutil.safe_load(data)
         self.assertIsInstance(back[0], str)

View File

@@ -5,7 +5,7 @@ Ported to Python 3.
 import os.path, re
 from urllib.parse import quote as url_quote
 import json
-from six.moves import StringIO
+from io import StringIO
 from bs4 import BeautifulSoup

View File

@@ -4,15 +4,8 @@ Base62 encoding.
 Ported to Python 3.
 """
-from future.utils import PY2
-if PY2:
-    import string
-    maketrans = string.maketrans
-    translate = string.translate
-else:
-    maketrans = bytes.maketrans
-    translate = bytes.translate
+maketrans = bytes.maketrans
+translate = bytes.translate
 from past.builtins import chr as byteschr
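For reference, the Python 3 built-ins the module now binds directly: bytes.maketrans() builds a 256-byte translation table and bytes.translate() applies it. A small sketch of remapping one byte alphabet onto another with them (the alphabets here are illustrative, not the module's):

# Map the characters of one alphabet onto another, byte for byte.
table = bytes.maketrans(b"abc", b"xyz")
assert b"aabbcc".translate(table) == b"xxyyzz"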

View File

@@ -8,7 +8,7 @@ Once Python 2 support is dropped, most of this module will obsolete, since
 Unicode is the default everywhere in Python 3.
 """
-from future.utils import PY3, native_str
+from future.utils import native_str
 from future.builtins import str as future_str
 from past.builtins import unicode
@@ -56,25 +56,13 @@ def check_encoding(encoding):
 io_encoding = "utf-8"
 filesystem_encoding = None
-is_unicode_platform = False
-use_unicode_filepath = False
+is_unicode_platform = True
+use_unicode_filepath = True
 def _reload():
-    global filesystem_encoding, is_unicode_platform, use_unicode_filepath
+    global filesystem_encoding
     filesystem_encoding = canonical_encoding(sys.getfilesystemencoding())
     check_encoding(filesystem_encoding)
-    is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"]
-    # Despite the Unicode-mode FilePath support added to Twisted in
-    # <https://twistedmatrix.com/trac/ticket/7805>, we can't yet use
-    # Unicode-mode FilePaths with INotify on non-Windows platforms due to
-    # <https://twistedmatrix.com/trac/ticket/7928>. Supposedly 7928 is fixed,
-    # though... and Tahoe-LAFS doesn't use inotify anymore!
-    #
-    # In the interest of not breaking anything, this logic is unchanged for
-    # Python 2, but on Python 3 the paths are always unicode, like it or not.
-    use_unicode_filepath = PY3 or sys.platform == "win32"
 _reload()
@@ -128,9 +116,7 @@ def unicode_to_argv(s):
     Windows, this returns the input unmodified.
     """
     precondition(isinstance(s, unicode), s)
-    if PY3:
-        warnings.warn("This will be unnecessary once Python 2 is dropped.",
-                      DeprecationWarning)
+    warnings.warn("This is unnecessary.", DeprecationWarning)
     if sys.platform == "win32":
         return s
     return ensure_str(s)
@@ -184,25 +170,9 @@ def unicode_to_output(s):
     the responsibility of stdout/stderr, they expect Unicode by default.
     """
     precondition(isinstance(s, unicode), s)
-    if PY3:
-        warnings.warn("This will be unnecessary once Python 2 is dropped.",
-                      DeprecationWarning)
+    warnings.warn("This is unnecessary.", DeprecationWarning)
     return s
-    try:
-        out = s.encode(io_encoding)
-    except (UnicodeEncodeError, UnicodeDecodeError):
-        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
-            native_str("A string could not be encoded as %s for output to the terminal:\n%r" %
-                (io_encoding, repr(s))))
-    if PRINTABLE_8BIT.search(out) is None:
-        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
-            native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
-                (io_encoding, repr(s))))
-    return out
 def _unicode_escape(m, quote_newlines):
     u = m.group(0)
     if u == u'"' or u == u'$' or u == u'`' or u == u'\\':
@@ -303,19 +273,6 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
         return b'"%s"' % (escaped.encode(encoding, 'backslashreplace'),)
     result = _encode(s)
-    if PY3:
-        # On Python 3 half of what this function does is unnecessary, since
-        # sys.stdout typically expects Unicode. To ensure no encode errors, one
-        # can do:
-        #
-        # sys.stdout.reconfigure(encoding=sys.stdout.encoding, errors="backslashreplace")
-        #
-        # Although the problem is that doesn't work in Python 3.6, only 3.7 or
-        # later... For now not thinking about it, just returning unicode since
-        # that is the right thing to do on Python 3.
-        #
-        # Now that Python 3.7 is the minimum, this can in theory be done:
-        # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3866
     result = result.decode(encoding)
     return result
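The comment block removed above pointed at reconfiguring the stream instead of encoding by hand; on Python 3.7+ this is available as TextIOWrapper.reconfigure(). A minimal sketch (not part of the commit) of that approach:

import sys

# Re-wrap stdout so unencodable characters become backslash escapes
# instead of raising UnicodeEncodeError (Python 3.7+ API).
sys.stdout.reconfigure(encoding=sys.stdout.encoding, errors="backslashreplace")

print("snowman: \u2603")  # safe even on a non-UTF-8 terminal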

View File

@@ -4,13 +4,6 @@ Hashing utilities.
 Ported to Python 3.
 """
-from future.utils import PY2
-if PY2:
-    # Don't import bytes to prevent leaking future's bytes.
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes # noqa: F401
-else:
-    future_bytes = bytes
 from past.builtins import chr as byteschr
 import os
@@ -246,7 +239,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):
 def _xor(a, b):
-    return b"".join([byteschr(c ^ b) for c in future_bytes(a)])
+    return b"".join([byteschr(c ^ b) for c in bytes(a)])
def hmac(tag, data): def hmac(tag, data):

View File

@@ -4,7 +4,6 @@ Logging utilities.
 Ported to Python 3.
 """
-from future.utils import PY2
 from six import ensure_str
 from pyutil import nummedobj
@@ -12,14 +11,10 @@ from pyutil import nummedobj
 from foolscap.logging import log
 from twisted.python import log as tw_log
-if PY2:
-    def bytes_to_unicode(ign, obj):
-        return obj
-else:
-    # We want to convert bytes keys to Unicode, otherwise JSON serialization
-    # inside foolscap will fail (for details see
-    # https://github.com/warner/foolscap/issues/88)
-    from .jsonbytes import bytes_to_unicode
+# We want to convert bytes keys to Unicode, otherwise JSON serialization
+# inside foolscap will fail (for details see
+# https://github.com/warner/foolscap/issues/88)
+from .jsonbytes import bytes_to_unicode
 NOISY = log.NOISY # 10
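The comment kept above names the underlying problem: the standard json module rejects bytes keys and values, so anything destined for JSON must be decoded first. A hypothetical sketch of the failure and a workaround (the helper name is illustrative, not Tahoe's):

import json

event = {b"facility": b"tahoe.storage", "count": 3}

try:
    json.dumps(event)
except TypeError as e:
    print("bytes keys are rejected:", e)

def decode_bytes(obj):
    # Recursively turn bytes keys/values into str so json.dumps succeeds.
    if isinstance(obj, bytes):
        return obj.decode("utf-8")
    if isinstance(obj, dict):
        return {decode_bytes(k): decode_bytes(v) for k, v in obj.items()}
    return obj

print(json.dumps(decode_bytes(event)))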

View File

@@ -1,7 +1,3 @@
-from future.utils import PY3
-from past.builtins import unicode
 # This code isn't loadable or sensible except on Windows. Importers all know
 # this and are careful. Normally I would just let an import error from ctypes
 # explain any mistakes but Mypy also needs some help here. This assert
@@ -123,82 +119,6 @@ def initialize():
     SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX)
-    if PY3:
-        # The rest of this appears to be Python 2-specific
-        return
-    original_stderr = sys.stderr
-    # If any exception occurs in this code, we'll probably try to print it on stderr,
-    # which makes for frustrating debugging if stderr is directed to our wrapper.
-    # So be paranoid about catching errors and reporting them to original_stderr,
-    # so that we can at least see them.
-    def _complain(output_file, message):
-        print(isinstance(message, str) and message or repr(message), file=output_file)
-        log.msg(message, level=log.WEIRD)
-    _complain = partial(_complain, original_stderr)
-    # Work around <http://bugs.python.org/issue6058>.
-    codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
-    # Make Unicode console output work independently of the current code page.
-    # This also fixes <http://bugs.python.org/issue1602>.
-    # Credit to Michael Kaplan <https://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
-    # and TZOmegaTZIOY
-    # <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
-    try:
-        old_stdout_fileno = None
-        old_stderr_fileno = None
-        if hasattr(sys.stdout, 'fileno'):
-            old_stdout_fileno = sys.stdout.fileno()
-        if hasattr(sys.stderr, 'fileno'):
-            old_stderr_fileno = sys.stderr.fileno()
-        real_stdout = (old_stdout_fileno == STDOUT_FILENO)
-        real_stderr = (old_stderr_fileno == STDERR_FILENO)
-        if real_stdout:
-            hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
-            if not a_console(hStdout):
-                real_stdout = False
-        if real_stderr:
-            hStderr = GetStdHandle(STD_ERROR_HANDLE)
-            if not a_console(hStderr):
-                real_stderr = False
-        if real_stdout:
-            sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '<Unicode console stdout>', _complain)
-        else:
-            sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '<Unicode redirected stdout>', _complain)
-        if real_stderr:
-            sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>', _complain)
-        else:
-            sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>', _complain)
-    except Exception as e:
-        _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))
-    argv = list(arg.encode("utf-8") for arg in get_argv())
-    # Take only the suffix with the same number of arguments as sys.argv.
-    # This accounts for anything that can cause initial arguments to be stripped,
-    # for example, the Python interpreter or any options passed to it, or runner
-    # scripts such as 'coverage run'. It works even if there are no such arguments,
-    # as in the case of a frozen executable created by bb-freeze or similar.
-    #
-    # Also, modify sys.argv in place. If any code has already taken a
-    # reference to the original argument list object then this ensures that
-    # code sees the new values. This reliance on mutation of shared state is,
-    # of course, awful. Why does this function even modify sys.argv? Why not
-    # have a function that *returns* the properly initialized argv as a new
-    # list? I don't know.
-    #
-    # At least Python 3 gets sys.argv correct so before very much longer we
-    # should be able to fix this bad design by deleting it.
-    sys.argv[:] = argv[-len(sys.argv):]
 def a_console(handle):
     """
@@ -274,13 +194,13 @@ class UnicodeOutput(object):
             # There is no Windows console available. That means we are
             # responsible for encoding the unicode to a byte string to
            # write it to a Python file object.
-            if isinstance(text, unicode):
+            if isinstance(text, str):
                 text = text.encode('utf-8')
             self._stream.write(text)
         else:
             # There is a Windows console available. That means Windows is
             # responsible for dealing with the unicode itself.
-            if not isinstance(text, unicode):
+            if not isinstance(text, str):
                 text = str(text).decode('utf-8')
             remaining = len(text)
             while remaining > 0: