remove more Python2 compatibility
This commit is contained in:
parent 3fb0bcfff7
commit 53084f76ce
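Note (annotation, not part of the diff): the hunks below apply the same Python-3-only substitutions over and over: six.moves StringIO becomes io.StringIO, six.binary_type/six.text_type become the builtins bytes/str, and PY2/PY3 branches keep only the Python 3 side. A minimal sketch of the recurring pattern, for illustration only:

# Python 2 shims removed by this commit -> their Python 3 equivalents:
#   six.moves.StringIO / cStringIO   -> io.StringIO
#   six.binary_type / six.text_type  -> bytes / str
#   future.utils.PY2 / PY3 branches  -> keep only the Python 3 branch
from io import StringIO

buf = StringIO()
buf.write("no six.moves needed on Python 3")
assert isinstance(buf.getvalue(), str)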
@@ -24,7 +24,7 @@
 import os, sys, subprocess, json, tempfile, zipfile, re, itertools
 import email.parser
 from pprint import pprint
-from six.moves import StringIO
+from io import StringIO
 import click

 all_packages = {}  # name -> version

@@ -4,13 +4,5 @@ Monkey-patching of third party libraries.
 Ported to Python 3.
 """

-from future.utils import PY2
-
-
 def patch():
     """Path third-party libraries to make Tahoe-LAFS work."""
-
-    if not PY2:
-        # Python 3 doesn't need to monkey patch Foolscap
-        return

@@ -10,8 +10,6 @@ objects that `cryptography` documents.
 Ported to Python 3.
 """

-import six
-
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.ciphers import (
     Cipher,

@@ -79,7 +77,7 @@ def encrypt_data(encryptor, plaintext):
     """

     _validate_cryptor(encryptor, encrypt=True)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')

     return encryptor.update(plaintext)

@@ -118,7 +116,7 @@ def decrypt_data(decryptor, plaintext):
     """

     _validate_cryptor(decryptor, encrypt=False)
-    if not isinstance(plaintext, six.binary_type):
+    if not isinstance(plaintext, bytes):
         raise ValueError('Plaintext must be bytes')

     return decryptor.update(plaintext)

@@ -160,7 +158,7 @@ def _validate_key(key):
     """
     confirm `key` is suitable for AES encryption, or raise ValueError
     """
-    if not isinstance(key, six.binary_type):
+    if not isinstance(key, bytes):
         raise TypeError('Key must be bytes')
     if len(key) not in (16, 32):
         raise ValueError('Key must be 16 or 32 bytes long')

@@ -175,7 +173,7 @@ def _validate_iv(iv):
     """
     if iv is None:
         return DEFAULT_IV
-    if not isinstance(iv, six.binary_type):
+    if not isinstance(iv, bytes):
         raise TypeError('IV must be bytes')
     if len(iv) != 16:
         raise ValueError('IV must be 16 bytes long')
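Annotation (not part of the diff): the aes.py hunks replace six.binary_type with bytes. On Python 3 six's type aliases are simply the builtins, so the rename is behavior-preserving; six is only needed here to demonstrate that:

import six

assert six.binary_type is bytes
assert six.text_type is str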
@@ -45,9 +45,6 @@ noisy = True
 from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \
      msg as logmsg, PrefixingLogMixin

-if six.PY3:
-    long = int
-

 def createSFTPError(errorCode, errorMessage):
     """

@@ -112,8 +112,8 @@ def formatTimeTahoeStyle(self, when):
     """
     d = datetime.datetime.utcfromtimestamp(when)
     if d.microsecond:
-        return d.isoformat(ensure_str(" "))[:-3]+"Z"
-    return d.isoformat(ensure_str(" ")) + ".000Z"
+        return d.isoformat(" ")[:-3]+"Z"
+    return d.isoformat(" ") + ".000Z"

 PRIV_README = """
 This directory contains files which contain private data for the Tahoe node,
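Annotation (not part of the diff): formatTimeTahoeStyle() can drop ensure_str(" ") because on Python 3 ensure_str() returns a str argument unchanged. Illustrative check only; six is imported here just for the demonstration:

import datetime
from six import ensure_str

assert ensure_str(" ") == " "   # identity for str input on Python 3
d = datetime.datetime(2024, 1, 1, 12, 0, 0, 123456)
assert d.isoformat(" ") == d.isoformat(ensure_str(" "))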
@@ -3,7 +3,6 @@ Ported to Python 3.
 """

 import sys
-import six
 from allmydata.util.assertutil import precondition
 from allmydata.util.fileutil import abspath_expanduser_unicode

@@ -13,10 +12,10 @@ if sys.platform == 'win32':
     from allmydata.windows import registry
     path = registry.get_base_dir_path()
     if path:
-        precondition(isinstance(path, six.text_type), path)
+        precondition(isinstance(path, str), path)
         _default_nodedir = abspath_expanduser_unicode(path)

 if _default_nodedir is None:
-    path = abspath_expanduser_unicode(u"~/.tahoe")
-    precondition(isinstance(path, six.text_type), path)
+    path = abspath_expanduser_unicode("~/.tahoe")
+    precondition(isinstance(path, str), path)
     _default_nodedir = path

@@ -65,8 +65,8 @@ class Options(usage.Options):
     ]
     optParameters = [
         ["node-directory", "d", None, NODEDIR_HELP],
-        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type],
-        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type],
+        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", str],
+        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", str],
     ]

     def opt_version(self):
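Annotation (not part of the diff): the trailing element of each optParameters entry is a coercion callable applied to the parsed value, so plain str does the same job six.text_type did. A minimal sketch under that assumption, reusing the parameter from the hunk above; the server value passed to parseOptions is an arbitrary example:

from twisted.python import usage

class Options(usage.Options):
    optParameters = [
        # the trailing callable coerces the command-line value; str == six.text_type on Python 3
        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1",
         "The magic wormhole server to use.", str],
    ]

options = Options()
options.parseOptions(["--wormhole-server", "ws://example.invalid:4000/v1"])
assert isinstance(options["wormhole-server"], str)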
@@ -262,7 +262,7 @@ def _setup_coverage(reactor, argv):
     # can we put this _setup_coverage call after we hit
     # argument-parsing?
-    # ensure_str() only necessary on Python 2.
-    if six.ensure_str('--coverage') not in sys.argv:
+    if '--coverage' not in sys.argv:
         return
     argv.remove('--coverage')

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
-
 from six import ensure_str

 import os, time

@@ -81,9 +79,7 @@ class SlowOperationRunner(object):
         if not data["finished"]:
             return False
         if self.options.get("raw"):
-            if PY3:
-                # need to write bytes!
-                stdout = stdout.buffer
+            stdout = stdout.buffer
             if is_printable_ascii(jdata):
                 stdout.write(jdata)
                 stdout.write(b"\n")
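Annotation (not part of the diff): several CLI hunks replace the PY3-guarded `stdout = stdout.buffer` with an unconditional assignment. On Python 3, sys.stdout is a text stream and its .buffer attribute is the underlying binary stream that accepts bytes. Minimal illustration:

import sys

sys.stdout.buffer.write(b'{"finished": true}\n')   # bytes go to the binary layer
sys.stdout.buffer.flush()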
@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six import ensure_str, ensure_text
+from six import ensure_text

 from urllib.parse import quote as url_quote
 import json

@@ -168,7 +168,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
         # LIT files and directories do not have a "summary" field.
         summary = cr.get("summary", "Healthy (LIT)")
-        # When Python 2 is dropped the ensure_text()/ensure_str() will be unnecessary.
-        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), quote_output(summary, quotemarks=False)),
+        print(ensure_text("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)),
                           encoding=get_io_encoding()), file=stdout)

         # always print out corrupt shares

@@ -246,13 +246,11 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
         if not path:
             path = ["<root>"]
         # we don't seem to have a summary available, so build one
-        # When Python 2 is dropped the ensure_text/ensure_str crap can be
-        # dropped.
         if was_healthy:
-            summary = ensure_str("healthy")
+            summary = "healthy"
         else:
-            summary = ensure_str("not healthy")
-        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), summary),
+            summary = "not healthy"
+        print(ensure_text("%s: %s" % (quote_path(path), summary),
                           encoding=get_io_encoding()), file=stdout)

         # always print out corrupt shares

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
-
 from urllib.parse import quote as url_quote
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
      UnknownAliasError

@@ -36,7 +34,7 @@ def get(options):
         outf = stdout
         # Make sure we can write bytes; on Python 3 stdout is Unicode by
         # default.
-        if PY3 and getattr(outf, "encoding", None) is not None:
+        if getattr(outf, "encoding", None) is not None:
             outf = outf.buffer
         while True:
             data = resp.read(4096)
@@ -2,10 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
-
-from six import ensure_str
-
 from urllib.parse import quote as url_quote
 import json
 from twisted.protocols.basic import LineOnlyReceiver

@@ -56,8 +52,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
         # use Twisted to split this into lines
         self.in_error = False
         # Writing bytes, so need binary stdout.
-        if PY3:
-            stdout = stdout.buffer
+        stdout = stdout.buffer
         while True:
             chunk = resp.read(100)
             if not chunk:

@@ -99,8 +94,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
             if vc:
                 print(quote_output(vc, quotemarks=False), file=stdout)
             else:
-                # ensure_str() only necessary for Python 2.
-                print(ensure_str("%s %s") % (
+                print("%s %s" % (
                     quote_output(d["cap"], quotemarks=False),
                     quote_path(d["path"], quotemarks=False)), file=stdout)

@@ -2,8 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
-
 import os.path
 from allmydata.util import base32

@@ -43,7 +41,5 @@ def storage_index_to_dir(storageindex):
     Returns native string.
     """
     sia = si_b2a(storageindex)
-    if PY3:
-        # On Python 3 we expect paths to be unicode.
-        sia = sia.decode("ascii")
+    sia = sia.decode("ascii")
     return os.path.join(sia[:2], sia)
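Annotation (not part of the diff): storage_index_to_dir() now always decodes the base32 storage index to text, since os.path wants str on Python 3. Illustrative sketch only; the literal below is a stand-in for what si_b2a() would return:

import os.path

sia = b"aabbccddeeffgg"        # stand-in for si_b2a(storageindex), which returns base32 bytes
sia = sia.decode("ascii")      # paths are unicode on Python 3
assert os.path.join(sia[:2], sia) == os.path.join("aa", "aabbccddeeffgg")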
@@ -4,9 +4,6 @@ Crawl the storage server shares.
 Ported to Python 3.
 """

-
-from future.utils import PY2, PY3
-
 import os
 import time
 import json

@@ -150,10 +147,7 @@ def _dump_json_to_file(js, afile):
     """
     with afile.open("wb") as f:
         data = json.dumps(js)
-        if PY2:
-            f.write(data)
-        else:
-            f.write(data.encode("utf8"))
+        f.write(data.encode("utf8"))


 class _LeaseStateSerializer(object):
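Annotation (not part of the diff): _dump_json_to_file() keeps only the Python 3 branch because json.dumps() returns str, which must be encoded before writing to a file opened in binary mode. Sketch with a hypothetical path:

import json

js = {"version": 1, "finished": True}
data = json.dumps(js)                  # str on Python 3
with open("state.json", "wb") as f:    # "state.json" is an illustrative path
    f.write(data.encode("utf8"))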
@@ -249,9 +243,7 @@ class ShareCrawler(service.MultiService):
         self._state_serializer = _LeaseStateSerializer(statefile)
         self.prefixes = [si_b2a(struct.pack(">H", i << (16-10)))[:2]
                          for i in range(2**10)]
-        if PY3:
-            # On Python 3 we expect the paths to be unicode, not bytes.
-            self.prefixes = [p.decode("ascii") for p in self.prefixes]
+        self.prefixes = [p.decode("ascii") for p in self.prefixes]
         self.prefixes.sort()
         self.timer = None
         self.bucket_cache = (None, [])

@@ -2,10 +2,8 @@
 Ported to Python 3.
 """

-from future.utils import PY2
-
 import os.path
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from datetime import timedelta
 import re

@@ -421,10 +419,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
             else:
                 return original_open(name, *args, **kwargs)

-        if PY2:
-            from allmydata.scripts import cli as module_to_patch
-        else:
-            import builtins as module_to_patch
+        import builtins as module_to_patch
         patcher = MonkeyPatcher((module_to_patch, 'open', call_file))
         patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to'])
         self.failUnless(ns.called)
@@ -4,7 +4,7 @@ Ported to Python 3.

 import sys
 import os.path, time
-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.trial import unittest

 from allmydata.util import fileutil

@@ -3,7 +3,7 @@ from six import ensure_text
 import os.path
 import json
 from twisted.trial import unittest
-from six.moves import cStringIO as StringIO
+from io import StringIO

 from allmydata import uri
 from allmydata.util import base32

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import cStringIO as StringIO
+from io import StringIO
 import re
 from six import ensure_text

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import StringIO
+from io import StringIO
 import os.path
 from twisted.trial import unittest
 from urllib.parse import quote as url_quote

@@ -2,9 +2,6 @@
 Ported to Python 3.
 """

-from future.utils import PY3
-from six import ensure_str
-
 from twisted.trial import unittest
 from twisted.internet import defer

@@ -26,10 +23,6 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         good_arg = u"g\u00F6\u00F6d"
         good_out = u"g\u00F6\u00F6d"

-        # On Python 2 we get bytes, so we need encoded version. On Python 3
-        # stdio is unicode so can leave unchanged.
-        good_out_encoded = good_out if PY3 else good_out.encode(get_io_encoding())
-
         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n

@@ -52,7+45,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             (rc, out, err) = args
             self.failUnlessReallyEqual(rc, 0)
             self.assertEqual(len(err), 0, err)
-            expected = sorted([ensure_str("0share"), ensure_str("1share"), good_out_encoded])
+            expected = sorted(["0share", "1share", good_out])
             self.assertEqual(sorted(out.splitlines()), expected)
         d.addCallback(_check1)
         d.addCallback(lambda ign: self.do_cli("ls", "missing"))

@@ -85,8 +78,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             # listing a file (as dir/filename) should have the edge metadata,
             # including the filename
             self.failUnlessReallyEqual(rc, 0)
-            self.failUnlessIn(good_out_encoded, out)
-            self.failIfIn(ensure_str("-r-- %d -" % len(small)), out,
+            self.failUnlessIn(good_out, out)
+            self.failIfIn("-r-- %d -" % len(small), out,
                           "trailing hyphen means unknown date")

             if good_arg is not None:
@@ -5,7 +5,7 @@ Tests for ``allmydata.scripts.tahoe_run``.
 from __future__ import annotations

 import re
-from six.moves import (
+from io import (
     StringIO,
 )

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from future.utils import PY2, PY3, bchr, binary_type
+from future.utils import bchr
 from future.builtins import str as future_str

 import os

@@ -13,8 +13,6 @@ from functools import (
     partial,
 )
 from random import randrange
-if PY2:
-    from StringIO import StringIO
 from io import (
     TextIOWrapper,
     BytesIO,

@@ -101,22 +99,7 @@ def run_cli_native(verb, *args, **kwargs):
     )
     argv = ["tahoe"] + nodeargs + [verb] + list(args)
     stdin = kwargs.get("stdin", "")
-    if PY2:
-        # The original behavior, the Python 2 behavior, is to accept either
-        # bytes or unicode and try to automatically encode or decode as
-        # necessary. This works okay for ASCII and if LANG is set
-        # appropriately. These aren't great constraints so we should move
-        # away from this behavior.
-        #
-        # The encoding attribute doesn't change StringIO behavior on Python 2,
-        # but it's there for realism of the emulation.
-        stdin = StringIO(stdin)
-        stdin.encoding = encoding
-        stdout = StringIO()
-        stdout.encoding = encoding
-        stderr = StringIO()
-        stderr.encoding = encoding
-    else:
+    if True:
         # The new behavior, the Python 3 behavior, is to accept unicode and
         # encode it using a specific encoding. For older versions of Python 3,
         # the encoding is determined from LANG (bad) but for newer Python 3,

@@ -146,13 +129,13 @@ def run_cli_native(verb, *args, **kwargs):
         stderr=stderr,
     )
     def _done(rc, stdout=stdout, stderr=stderr):
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return 0, _getvalue(stdout), _getvalue(stderr)
     def _err(f, stdout=stdout, stderr=stderr):
         f.trap(SystemExit)
-        if return_bytes and PY3:
+        if return_bytes:
             stdout = stdout.buffer
             stderr = stderr.buffer
         return f.value.code, _getvalue(stdout), _getvalue(stderr)
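Annotation (not part of the diff): run_cli_native() drops the PY2 StringIO emulation and relies on the Python 3 setup, where stdout/stderr are text wrappers over byte buffers, so .buffer yields the captured bytes when return_bytes is requested. A minimal sketch of that relationship:

from io import BytesIO, TextIOWrapper

stdout = TextIOWrapper(BytesIO(), encoding="utf-8")
stdout.write("h\u00e9llo")
stdout.flush()
assert stdout.buffer.getvalue() == "h\u00e9llo".encode("utf-8")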
@@ -189,11 +172,7 @@ def run_cli_unicode(verb, argv, nodeargs=None, stdin=None, encoding=None):
         argv=argv,
     )
     codec = encoding or "ascii"
-    if PY2:
-        encode = lambda t: None if t is None else t.encode(codec)
-    else:
-        # On Python 3 command-line parsing expects Unicode!
-        encode = lambda t: t
+    encode = lambda t: t
     d = run_cli_native(
         encode(verb),
         nodeargs=list(encode(arg) for arg in nodeargs),

@@ -238,7 +217,7 @@ def flip_bit(good, which):
 def flip_one_bit(s, offset=0, size=None):
     """ flip one random bit of the string s, in a byte greater than or equal to offset and less
     than offset+size. """
-    precondition(isinstance(s, binary_type))
+    precondition(isinstance(s, bytes))
     if size is None:
         size=len(s)-offset
     i = randrange(offset, offset+size)

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import cStringIO as StringIO
+from io import StringIO
 from twisted.internet import defer, reactor
 from ..common import AsyncBrokenTestCase
 from testtools.matchers import (

@@ -2,7 +2,7 @@
 Ported to Python 3.
 """

-from six.moves import cStringIO as StringIO
+from io import StringIO
 from ..common import AsyncTestCase
 from testtools.matchers import Equals, HasLength, Contains
 from twisted.internet import defer

@@ -2,8 +2,6 @@
 This module has been ported to Python 3.
 """

-from future.utils import PY2
-
 import sys
 import random

@@ -31,7 +29,7 @@ class TestFlipOneBit(SyncTestCase):

     def test_accepts_byte_string(self):
         actual = flip_one_bit(b'foo')
-        self.assertEqual(actual, b'fno' if PY2 else b'fom')
+        self.assertEqual(actual, b'fom')

     def test_rejects_unicode_string(self):
         self.assertRaises(AssertionError, flip_one_bit, u'foo')

@@ -5,8 +5,6 @@ Ported to Python 3.
 """

-from future.utils import PY3
-
 import time
 import os.path
 from twisted.trial import unittest
@@ -28,10 +26,9 @@ class BucketEnumeratingCrawler(ShareCrawler):
         self.all_buckets = []
         self.finished_d = defer.Deferred()
     def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
-        if PY3:
-            # Bucket _inputs_ are bytes, and that's what we will compare this
-            # to:
-            storage_index_b32 = storage_index_b32.encode("ascii")
+        # Bucket _inputs_ are bytes, and that's what we will compare this
+        # to:
+        storage_index_b32 = storage_index_b32.encode("ascii")
         self.all_buckets.append(storage_index_b32)
     def finished_cycle(self, cycle):
         eventually(self.finished_d.callback, None)

@@ -46,10 +43,9 @@ class PacedCrawler(ShareCrawler):
         self.finished_d = defer.Deferred()
         self.yield_cb = None
     def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
-        if PY3:
-            # Bucket _inputs_ are bytes, and that's what we will compare this
-            # to:
-            storage_index_b32 = storage_index_b32.encode("ascii")
+        # Bucket _inputs_ are bytes, and that's what we will compare this
+        # to:
+        storage_index_b32 = storage_index_b32.encode("ascii")
         self.all_buckets.append(storage_index_b32)
         self.countdown -= 1
         if self.countdown == 0:
@@ -1,6 +1,3 @@
-
-from future.utils import native_bytes
-
 import unittest

 from base64 import b64decode

@@ -40,7 +37,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_2048_PRIV_KEY = b64decode(f.read().strip())
-    assert isinstance(RSA_2048_PRIV_KEY, native_bytes)
+    assert isinstance(RSA_2048_PRIV_KEY, bytes)

     with RESOURCE_DIR.child('pycryptopp-rsa-2048-sig.txt').open('r') as f:
         # Signature created using `RSA_2048_PRIV_KEY` via:

@@ -61,7+58,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_TINY_PRIV_KEY = b64decode(f.read().strip())
-    assert isinstance(RSA_TINY_PRIV_KEY, native_bytes)
+    assert isinstance(RSA_TINY_PRIV_KEY, bytes)

     with RESOURCE_DIR.child('pycryptopp-rsa-32768-priv.txt').open('r') as f:
         # Created using `pycryptopp`:

@@ -72,7 +69,7 @@ class TestRegression(unittest.TestCase):
         # priv_str = b64encode(priv.serialize())
         # pub_str = b64encode(priv.get_verifying_key().serialize())
         RSA_HUGE_PRIV_KEY = b64decode(f.read().strip())
-    assert isinstance(RSA_HUGE_PRIV_KEY, native_bytes)
+    assert isinstance(RSA_HUGE_PRIV_KEY, bytes)

     def test_old_start_up_test(self):
         """

@@ -324,7 +321,7 @@ class TestEd25519(unittest.TestCase):
         private_key, public_key = ed25519.create_signing_keypair()
         private_key_str = ed25519.string_from_signing_key(private_key)

-        self.assertIsInstance(private_key_str, native_bytes)
+        self.assertIsInstance(private_key_str, bytes)

         private_key2, public_key2 = ed25519.signing_keypair_from_string(private_key_str)

@@ -340,7 +337,7 @@ class TestEd25519(unittest.TestCase):

         # ditto, but for the verifying keys
         public_key_str = ed25519.string_from_verifying_key(public_key)
-        self.assertIsInstance(public_key_str, native_bytes)
+        self.assertIsInstance(public_key_str, bytes)

         public_key2 = ed25519.verifying_key_from_string(public_key_str)
         self.assertEqual(

@@ -444,7 +441,7 @@ class TestRsa(unittest.TestCase):
         priv_key, pub_key = rsa.create_signing_keypair(2048)
         priv_key_str = rsa.der_string_from_signing_key(priv_key)

-        self.assertIsInstance(priv_key_str, native_bytes)
+        self.assertIsInstance(priv_key_str, bytes)

         priv_key2, pub_key2 = rsa.create_signing_keypair_from_string(priv_key_str)

@@ -10,7 +10,6 @@ from future.utils import bchr

 from typing import Any

-import six
 import os
 from twisted.trial import unittest
 from twisted.internet import defer, reactor

@@ -30,9 +29,6 @@ from allmydata.immutable.downloader.fetcher import SegmentFetcher
 from allmydata.codec import CRSDecoder
 from foolscap.eventual import eventually, fireEventually, flushEventualQueue

-if six.PY3:
-    long = int
-
 plaintext = b"This is a moderate-sized file.\n" * 10
 mutable_plaintext = b"This is a moderate-sized mutable file.\n" * 10
@@ -1,8 +1,4 @@
-
-from future.utils import PY2, PY3
-
-from past.builtins import unicode

 lumiere_nfc = u"lumi\u00E8re"
 Artonwall_nfc = u"\u00C4rtonwall.mp3"
 Artonwall_nfd = u"A\u0308rtonwall.mp3"

@@ -46,13 +42,7 @@ if __name__ == "__main__":
         for fname in TEST_FILENAMES:
             open(os.path.join(tmpdir, fname), 'w').close()

-        # On Python 2, listing directories returns unicode under Windows or
-        # MacOS X if the input is unicode. On Python 3, it always returns
-        # Unicode.
-        if PY2 and sys.platform in ('win32', 'darwin'):
-            dirlist = os.listdir(unicode(tmpdir))
-        else:
-            dirlist = os.listdir(tmpdir)
+        dirlist = os.listdir(tmpdir)

         print(" dirlist = %s" % repr(dirlist))
     except:

@@ -64,7 +54,6 @@ if __name__ == "__main__":

 import os, sys
 from unittest import skipIf

 from twisted.trial import unittest

@@ -87,15 +76,6 @@ class MockStdout(object):
 # The following tests apply only to platforms that don't store filenames as
 # Unicode entities on the filesystem.
 class EncodingUtilNonUnicodePlatform(unittest.TestCase):
-    @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
     def setUp(self):
         # Make sure everything goes back to the way it was at the end of the
         # test.
         self.addCleanup(_reload)

         # Mock sys.platform because unicode_platform() uses it. Cleanups run
         # in reverse order so we do this second so it gets undone first.
         self.patch(sys, "platform", "linux")

     def test_listdir_unicode(self):
         # What happens if latin1-encoded filenames are encountered on an UTF-8

@@ -143,10 +123,7 @@ class EncodingUtil(ReallyEqualMixin):
         converts to bytes using UTF-8 elsewhere.
         """
         result = unicode_to_argv(lumiere_nfc)
-        if PY3 or self.platform == "win32":
-            expected_value = lumiere_nfc
-        else:
-            expected_value = lumiere_nfc.encode(self.io_encoding)
+        expected_value = lumiere_nfc

         self.assertIsInstance(result, type(expected_value))
         self.assertEqual(result, expected_value)

@@ -167,13 +144,10 @@ class EncodingUtil(ReallyEqualMixin):
                       % (self.filesystem_encoding,))

         def call_os_listdir(path):
-            if PY2:
-                return self.dirlist
-            else:
-                # Python 3 always lists unicode filenames:
-                return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
-                        else d
-                        for d in self.dirlist]
+            # Python 3 always lists unicode filenames:
+            return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
+                    else d
+                    for d in self.dirlist]

         self.patch(os, 'listdir', call_os_listdir)
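Annotation (not part of the diff): the encodingutil tests no longer branch on PY2 because os.listdir() on a str path always returns str filenames on Python 3. Quick illustration, assuming the filesystem accepts the non-ASCII name:

import os
import tempfile

d = tempfile.mkdtemp()
open(os.path.join(d, "\u00c4rtonwall.mp3"), "w").close()
assert all(isinstance(name, str) for name in os.listdir(d))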
@@ -204,10 +178,7 @@ class StdlibUnicode(unittest.TestCase):
         fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
         open(fn, 'wb').close()
         self.failUnless(os.path.exists(fn))
-        if PY2:
-            getcwdu = os.getcwdu
-        else:
-            getcwdu = os.getcwd
+        getcwdu = os.getcwd
         self.failUnless(os.path.exists(os.path.join(getcwdu(), fn)))
         filenames = listdir_unicode(lumiere_nfc)

@@ -237,7 +208,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
         _reload()

     def _check(self, inp, out, enc, optional_quotes, quote_newlines):
-        if PY3 and isinstance(out, bytes):
+        if isinstance(out, bytes):
             out = out.decode(enc or encodingutil.io_encoding)
         out2 = out
         if optional_quotes:

@@ -266,9 +237,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):

     def _test_quote_output_all(self, enc):
         def check(inp, out, optional_quotes=False, quote_newlines=None):
-            if PY3:
-                # Result is always Unicode on Python 3
-                out = out.decode("ascii")
+            out = out.decode("ascii")
             self._check(inp, out, enc, optional_quotes, quote_newlines)

         # optional single quotes

@@ -354,9 +323,7 @@ def win32_other(win32, other):
 class QuotePaths(ReallyEqualMixin, unittest.TestCase):

     def assertPathsEqual(self, actual, expected):
-        if PY3:
-            # On Python 3, results should be unicode:
-            expected = expected.decode("ascii")
+        expected = expected.decode("ascii")
         self.failUnlessReallyEqual(actual, expected)

     def test_quote_path(self):
@@ -6,8 +6,8 @@ import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
 from twisted.python.usage import UsageError
-from six.moves import StringIO
-import mock
+from io import StringIO
+from unittest import mock
 from ..util import i2p_provider
 from ..scripts import create_node, runner

@@ -2,10 +2,6 @@
 Ported to Python 3
 """

-from future.utils import PY2
-
-from six import ensure_text
-
 import os.path, re, sys
 from os import linesep
 import locale

@@ -129,18 +125,14 @@ def run_bintahoe(extra_argv, python_options=None):
     :return: A three-tuple of stdout (unicode), stderr (unicode), and the
         child process "returncode" (int).
     """
-    executable = ensure_text(sys.executable)
-    argv = [executable]
+    argv = [sys.executable]
     if python_options is not None:
         argv.extend(python_options)
     argv.extend([u"-b", u"-m", u"allmydata.scripts.runner"])
     argv.extend(extra_argv)
     argv = list(unicode_to_argv(arg) for arg in argv)
     p = Popen(argv, stdout=PIPE, stderr=PIPE)
-    if PY2:
-        encoding = "utf-8"
-    else:
-        encoding = locale.getpreferredencoding(False)
+    encoding = locale.getpreferredencoding(False)
     out = p.stdout.read().decode(encoding)
     err = p.stderr.read().decode(encoding)
     returncode = p.wait()

@@ -154,10 +146,7 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase):
         """
         tricky = u"\u00F6"
         out, err, returncode = run_bintahoe([tricky])
-        if PY2:
-            expected = u"Unknown command: \\xf6"
-        else:
-            expected = u"Unknown command: \xf6"
+        expected = u"Unknown command: \xf6"
         self.assertEqual(returncode, 1)
         self.assertIn(
             expected,

@@ -4,7 +4,7 @@ Tests for allmydata.util.statistics.
 Ported to Python 3.
 """

-from six.moves import StringIO  # native string StringIO
+from io import StringIO

 from twisted.trial import unittest
@@ -9,7 +9,7 @@ import os.path
 import re
 import json
 from unittest import skipIf
-from six.moves import StringIO
+from io import StringIO

 from twisted.trial import unittest
 from twisted.internet import defer

@@ -5,9 +5,8 @@ Ported to Python 3.
 import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
-from six.moves import StringIO
-from six import ensure_str
-import mock
+from io import StringIO
+from unittest import mock
 from ..util import tor_provider
 from ..scripts import create_node, runner
 from foolscap.eventual import flushEventualQueue

@@ -185,7 +184,7 @@ class CreateOnion(unittest.TestCase):
         txtorcon = mock.Mock()
         ehs = mock.Mock()
         # This appears to be a native string in the real txtorcon object...
-        ehs.private_key = ensure_str("privkey")
+        ehs.private_key = "privkey"
         ehs.hostname = "ONION.onion"
         txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
         ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))

@@ -2,7 +2,6 @@
 Ported to Python3.
 """

-import six
 import os, time, sys
 import yaml
 import json

@@ -22,8 +21,7 @@ from allmydata.util.cputhreadpool import defer_to_thread, disable_thread_pool_fo
 from allmydata.test.common_util import ReallyEqualMixin
 from .no_network import fireNow, LocalWrapper

-if six.PY3:
-    long = int
+long = int


 class IDLib(unittest.TestCase):

@@ -477,7 +475,7 @@ class YAML(unittest.TestCase):
         Unicode and (ASCII) native strings get roundtripped to Unicode strings.
         """
         data = yaml.safe_dump(
-            [six.ensure_str("str"), u"unicode", u"\u1234nicode"]
+            ["str", "unicode", "\u1234nicode"]
         )
         back = yamlutil.safe_load(data)
         self.assertIsInstance(back[0], str)
@@ -5,7 +5,7 @@ Ported to Python 3.
 import os.path, re
 from urllib.parse import quote as url_quote
 import json
-from six.moves import StringIO
+from io import StringIO

 from bs4 import BeautifulSoup

@@ -4,15 +4,8 @@ Base62 encoding.
 Ported to Python 3.
 """

-from future.utils import PY2
-
-if PY2:
-    import string
-    maketrans = string.maketrans
-    translate = string.translate
-else:
-    maketrans = bytes.maketrans
-    translate = bytes.translate
+maketrans = bytes.maketrans
+translate = bytes.translate

 from past.builtins import chr as byteschr
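Annotation (not part of the diff): base62.py keeps only the Python 3 definitions, where bytes.maketrans and bytes.translate take over from the Python 2 string module functions. For example:

maketrans = bytes.maketrans
translate = bytes.translate

table = maketrans(b"abc", b"xyz")
assert translate(b"aabbcc", table) == b"xxyyzz"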
@@ -8,7 +8,7 @@ Once Python 2 support is dropped, most of this module will obsolete, since
 Unicode is the default everywhere in Python 3.
 """

-from future.utils import PY3, native_str
+from future.utils import native_str
 from future.builtins import str as future_str

 from past.builtins import unicode

@@ -56,25 +56,13 @@ def check_encoding(encoding):
 io_encoding = "utf-8"

 filesystem_encoding = None
-is_unicode_platform = False
-use_unicode_filepath = False
+is_unicode_platform = True
+use_unicode_filepath = True

 def _reload():
-    global filesystem_encoding, is_unicode_platform, use_unicode_filepath
-
+    global filesystem_encoding
     filesystem_encoding = canonical_encoding(sys.getfilesystemencoding())
     check_encoding(filesystem_encoding)
-    is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"]
-
-    # Despite the Unicode-mode FilePath support added to Twisted in
-    # <https://twistedmatrix.com/trac/ticket/7805>, we can't yet use
-    # Unicode-mode FilePaths with INotify on non-Windows platforms due to
-    # <https://twistedmatrix.com/trac/ticket/7928>. Supposedly 7928 is fixed,
-    # though... and Tahoe-LAFS doesn't use inotify anymore!
-    #
-    # In the interest of not breaking anything, this logic is unchanged for
-    # Python 2, but on Python 3 the paths are always unicode, like it or not.
-    use_unicode_filepath = PY3 or sys.platform == "win32"

 _reload()

@@ -128,9 +116,7 @@ def unicode_to_argv(s):
     Windows, this returns the input unmodified.
     """
     precondition(isinstance(s, unicode), s)
-    if PY3:
-        warnings.warn("This will be unnecessary once Python 2 is dropped.",
-                      DeprecationWarning)
+    warnings.warn("This is unnecessary.", DeprecationWarning)
     if sys.platform == "win32":
         return s
     return ensure_str(s)
@@ -184,24 +170,8 @@ def unicode_to_output(s):
     the responsibility of stdout/stderr, they expect Unicode by default.
     """
     precondition(isinstance(s, unicode), s)
-    if PY3:
-        warnings.warn("This will be unnecessary once Python 2 is dropped.",
-                      DeprecationWarning)
-        return s
-
-    try:
-        out = s.encode(io_encoding)
-    except (UnicodeEncodeError, UnicodeDecodeError):
-        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
-                                 native_str("A string could not be encoded as %s for output to the terminal:\n%r" %
-                                            (io_encoding, repr(s))))
-
-    if PRINTABLE_8BIT.search(out) is None:
-        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
-                                 native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
-                                            (io_encoding, repr(s))))
-    return out
-
+    warnings.warn("This is unnecessary.", DeprecationWarning)
+    return s

 def _unicode_escape(m, quote_newlines):
     u = m.group(0)

@@ -303,20 +273,7 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
         return b'"%s"' % (escaped.encode(encoding, 'backslashreplace'),)

     result = _encode(s)
-    if PY3:
-        # On Python 3 half of what this function does is unnecessary, since
-        # sys.stdout typically expects Unicode. To ensure no encode errors, one
-        # can do:
-        #
-        # sys.stdout.reconfigure(encoding=sys.stdout.encoding, errors="backslashreplace")
-        #
-        # Although the problem is that doesn't work in Python 3.6, only 3.7 or
-        # later... For now not thinking about it, just returning unicode since
-        # that is the right thing to do on Python 3.
-        #
-        # Now that Python 3.7 is the minimum, this can in theory be done:
-        # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3866
-        result = result.decode(encoding)
+    result = result.decode(encoding)
     return result
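Annotation (not part of the diff): the long comment removed from quote_output() pointed at sys.stdout.reconfigure(), which since Python 3.7 is a supported way to make stdout tolerate unencodable characters. Illustrative only; the commit itself just decodes and returns unicode:

import sys

sys.stdout.reconfigure(errors="backslashreplace")   # Python 3.7+
print("lumi\u00e8re")                                # no UnicodeEncodeError even on ASCII terminals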
@@ -4,13 +4,6 @@ Hashing utilities.
 Ported to Python 3.
 """

-from future.utils import PY2
-if PY2:
-    # Don't import bytes to prevent leaking future's bytes.
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes  # noqa: F401
-else:
-    future_bytes = bytes
-
 from past.builtins import chr as byteschr

 import os

@@ -246,7 +239,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):


 def _xor(a, b):
-    return b"".join([byteschr(c ^ b) for c in future_bytes(a)])
+    return b"".join([byteschr(c ^ b) for c in bytes(a)])


 def hmac(tag, data):
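Annotation (not part of the diff): _xor() works unchanged with the builtin bytes because iterating bytes on Python 3 yields ints. Sketch with a local byteschr stand-in for past.builtins.chr:

def byteschr(i):
    return bytes([i])          # stand-in for past.builtins.chr on Python 3

def _xor(a, b):
    return b"".join([byteschr(c ^ b) for c in bytes(a)])

assert _xor(b"\x01\x02", 0xff) == b"\xfe\xfd"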
@@ -4,7 +4,6 @@ Logging utilities.
 Ported to Python 3.
 """

-from future.utils import PY2
 from six import ensure_str

 from pyutil import nummedobj

@@ -12,14 +11,10 @@ from pyutil import nummedobj
 from foolscap.logging import log
 from twisted.python import log as tw_log

-if PY2:
-    def bytes_to_unicode(ign, obj):
-        return obj
-else:
-    # We want to convert bytes keys to Unicode, otherwise JSON serialization
-    # inside foolscap will fail (for details see
-    # https://github.com/warner/foolscap/issues/88)
-    from .jsonbytes import bytes_to_unicode
+# We want to convert bytes keys to Unicode, otherwise JSON serialization
+# inside foolscap will fail (for details see
+# https://github.com/warner/foolscap/issues/88)
+from .jsonbytes import bytes_to_unicode


 NOISY = log.NOISY  # 10

@@ -1,7 +1,3 @@
-
-from future.utils import PY3
-from past.builtins import unicode
-
 # This code isn't loadable or sensible except on Windows. Importers all know
 # this and are careful. Normally I would just let an import error from ctypes
 # explain any mistakes but Mypy also needs some help here. This assert
@@ -123,82 +119,6 @@ def initialize():

     SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX)

-    if PY3:
-        # The rest of this appears to be Python 2-specific
-        return
-
-    original_stderr = sys.stderr
-
-    # If any exception occurs in this code, we'll probably try to print it on stderr,
-    # which makes for frustrating debugging if stderr is directed to our wrapper.
-    # So be paranoid about catching errors and reporting them to original_stderr,
-    # so that we can at least see them.
-    def _complain(output_file, message):
-        print(isinstance(message, str) and message or repr(message), file=output_file)
-        log.msg(message, level=log.WEIRD)
-
-    _complain = partial(_complain, original_stderr)
-
-    # Work around <http://bugs.python.org/issue6058>.
-    codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
-
-    # Make Unicode console output work independently of the current code page.
-    # This also fixes <http://bugs.python.org/issue1602>.
-    # Credit to Michael Kaplan <https://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
-    # and TZOmegaTZIOY
-    # <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
-    try:
-        old_stdout_fileno = None
-        old_stderr_fileno = None
-        if hasattr(sys.stdout, 'fileno'):
-            old_stdout_fileno = sys.stdout.fileno()
-        if hasattr(sys.stderr, 'fileno'):
-            old_stderr_fileno = sys.stderr.fileno()
-
-        real_stdout = (old_stdout_fileno == STDOUT_FILENO)
-        real_stderr = (old_stderr_fileno == STDERR_FILENO)
-
-        if real_stdout:
-            hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
-            if not a_console(hStdout):
-                real_stdout = False
-
-        if real_stderr:
-            hStderr = GetStdHandle(STD_ERROR_HANDLE)
-            if not a_console(hStderr):
-                real_stderr = False
-
-        if real_stdout:
-            sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '<Unicode console stdout>', _complain)
-        else:
-            sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '<Unicode redirected stdout>', _complain)
-
-        if real_stderr:
-            sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>', _complain)
-        else:
-            sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>', _complain)
-    except Exception as e:
-        _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))
-
-    argv = list(arg.encode("utf-8") for arg in get_argv())
-
-    # Take only the suffix with the same number of arguments as sys.argv.
-    # This accounts for anything that can cause initial arguments to be stripped,
-    # for example, the Python interpreter or any options passed to it, or runner
-    # scripts such as 'coverage run'. It works even if there are no such arguments,
-    # as in the case of a frozen executable created by bb-freeze or similar.
-    #
-    # Also, modify sys.argv in place. If any code has already taken a
-    # reference to the original argument list object then this ensures that
-    # code sees the new values. This reliance on mutation of shared state is,
-    # of course, awful. Why does this function even modify sys.argv? Why not
-    # have a function that *returns* the properly initialized argv as a new
-    # list? I don't know.
-    #
-    # At least Python 3 gets sys.argv correct so before very much longer we
-    # should be able to fix this bad design by deleting it.
-    sys.argv[:] = argv[-len(sys.argv):]


 def a_console(handle):
     """

@@ -274,13 +194,13 @@ class UnicodeOutput(object):
             # There is no Windows console available. That means we are
             # responsible for encoding the unicode to a byte string to
             # write it to a Python file object.
-            if isinstance(text, unicode):
+            if isinstance(text, str):
                 text = text.encode('utf-8')
             self._stream.write(text)
         else:
             # There is a Windows console available. That means Windows is
             # responsible for dealing with the unicode itself.
-            if not isinstance(text, unicode):
+            if not isinstance(text, str):
                 text = str(text).decode('utf-8')
             remaining = len(text)
             while remaining > 0: