Merge pull request #1367 from meejah/remove-future--a-detiste

Remove future (a-detiste)
This commit is contained in:
Itamar Turner-Trauring 2024-08-14 10:24:10 -04:00 committed by GitHub
commit 912662d3f7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 79 additions and 117 deletions

View File

@ -40,7 +40,7 @@ NURLs
The authentication and authorization properties of fURLs are a good fit for Tahoe-LAFS' requirements.
These are not inherently tied to the Foolscap protocol itself.
In particular they are beneficial to :doc:`../proposed/http-storage-node-protocol` which uses HTTP instead of Foolscap.
In particular they are beneficial to :doc:`http-storage-node-protocol` which uses HTTP instead of Foolscap.
It is conceivable they will also be used with WebSockets at some point as well.
Continuing to refer to these URLs as fURLs when they are being used for other protocols may cause confusion.

View File

@ -0,0 +1 @@

View File

@ -62,7 +62,8 @@ install_requires = [
# Twisted[conch] also depends on cryptography and Twisted[tls]
# transitively depends on cryptography. So it's anyone's guess what
# version of cryptography will *really* be installed.
"cryptography >= 2.6",
# * cryptography 43.0.0 makes __provides__ read-only; see ticket 4300
"cryptography >= 2.6, < 43.0.0",
# * Used for custom HTTPS validation
"pyOpenSSL >= 23.2.0",

View File

@ -3,8 +3,6 @@
Ported to Python 3.
"""
from past.builtins import unicode
import time
from zope.interface import implementer
@ -39,31 +37,29 @@ from eliot.twisted import (
)
NAME = Field.for_types(
u"name",
# Make sure this works on Python 2; with str, it gets Future str which
# breaks Eliot.
[unicode],
u"The name linking the parent to this node.",
"name",
[str],
"The name linking the parent to this node.",
)
METADATA = Field.for_types(
u"metadata",
"metadata",
[dict],
u"Data about a node.",
"Data about a node.",
)
OVERWRITE = Field.for_types(
u"overwrite",
"overwrite",
[bool],
u"True to replace an existing file of the same name, "
u"false to fail with a collision error.",
"True to replace an existing file of the same name, "
"false to fail with a collision error.",
)
ADD_FILE = ActionType(
u"dirnode:add-file",
"dirnode:add-file",
[NAME, METADATA, OVERWRITE],
[],
u"Add a new file as a child of a directory.",
"Add a new file as a child of a directory.",
)

View File

@ -2,8 +2,6 @@
Ported to Python 3.
"""
from past.utils import old_div
import struct
from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError, \
BadShareError
@ -260,7 +258,7 @@ class SDMFSlotWriteProxy(object):
self._required_shares)
assert expected_segment_size == segment_size
self._block_size = old_div(self._segment_size, self._required_shares)
self._block_size = self._segment_size // self._required_shares
# This is meant to mimic how SDMF files were built before MDMF
# entered the picture: we generate each share in its entirety,
@ -793,7 +791,7 @@ class MDMFSlotWriteProxy(object):
# and also because it provides a useful amount of bounds checking.
self._num_segments = mathutil.div_ceil(self._data_length,
self._segment_size)
self._block_size = old_div(self._segment_size, self._required_shares)
self._block_size = self._segment_size // self._required_shares
# We also calculate the share size, to help us with block
# constraints later.
tail_size = self._data_length % self._segment_size
@ -802,7 +800,7 @@ class MDMFSlotWriteProxy(object):
else:
self._tail_block_size = mathutil.next_multiple(tail_size,
self._required_shares)
self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
self._tail_block_size = self._tail_block_size // self._required_shares
# We already know where the sharedata starts; right after the end
# of the header (which is defined as the signable part + the offsets)
@ -1324,7 +1322,7 @@ class MDMFSlotReadProxy(object):
self._segment_size = segsize
self._data_length = datalen
self._block_size = old_div(self._segment_size, self._required_shares)
self._block_size = self._segment_size // self._required_shares
# We can upload empty files, and need to account for this fact
# so as to avoid zero-division and zero-modulo errors.
if datalen > 0:
@ -1336,7 +1334,7 @@ class MDMFSlotReadProxy(object):
else:
self._tail_block_size = mathutil.next_multiple(tail_size,
self._required_shares)
self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
self._tail_block_size = self._tail_block_size // self._required_shares
return encoding_parameters

View File

@ -2,9 +2,6 @@
Ported to Python 3.
"""
from future.utils import bytes_to_native_str
import os, stat, struct, time
from collections_extended import RangeMap
@ -534,9 +531,7 @@ class BucketReader(object):
def __repr__(self):
return "<%s %s %s>" % (self.__class__.__name__,
bytes_to_native_str(
base32.b2a(self.storage_index[:8])[:12]
),
base32.b2a(self.storage_index[:8])[:12].decode(),
self.shnum)
def read(self, offset, length):

View File

@ -3,7 +3,6 @@ Ported to Python 3.
"""
from __future__ import annotations
from future.utils import bytes_to_native_str
from typing import Iterable, Any
import os, re
@ -789,7 +788,7 @@ class StorageServer(service.MultiService):
report_path = get_corruption_report_path(
self.corruption_advisory_dir,
now,
si_s,
si_s.decode("utf8"),
shnum,
)
with open(report_path, "w", encoding="utf-8") as f:
@ -905,7 +904,12 @@ share_number: {share_number}
"""
def render_corruption_report(
    share_type: bytes,
    si_s: bytes,
    shnum: int,
    reason: bytes
) -> str:
    """
    Create a string that explains a corruption report using freeform text.

    :param share_type: The type of the share, as bytes (decoded as UTF-8/ASCII
        for the report).
    :param si_s: The encoded storage index of the corrupted share, as bytes.
    :param shnum: The share number.
    :param reason: The freeform reason for the corruption advisory, as bytes.

    :return: The rendered report text.
    """
    # All bytes fields are decoded so the template renders clean text rather
    # than b'...' reprs.
    return CORRUPTION_REPORT_FORMAT.format(
        type=share_type.decode(),
        storage_index=si_s.decode(),
        share_number=shnum,
        reason=reason.decode(),
    )
def get_corruption_report_path(
    base_dir: str,
    now: str,
    si_s: str,
    shnum: int
) -> str:
    """
    Determine the path to which a certain corruption report should be written.

    :param base_dir: The directory that holds corruption reports.
    :param now: A timestamp string naming when the report was received
        (presumably ISO-8601-style; colons in it are stripped below).
    :param si_s: The encoded storage index of the corrupted share.
    :param shnum: The share number.

    :return: The filesystem path at which to write the report.
    """
    # windows can't handle colons in the filename
    return os.path.join(
        base_dir,
        ("%s--%s-%d" % (now, si_s, shnum)).replace(":", "")
    )

View File

@ -2,7 +2,7 @@
Ported to Python 3.
"""
from past.builtins import chr as byteschr, long
from past.builtins import chr as byteschr
from zope.interface import implementer
from twisted.trial import unittest
@ -99,7 +99,7 @@ class FakeBucketReaderWriterProxy(object):
def get_block_data(self, blocknum, blocksize, size):
d = self._start()
def _try(unused=None):
assert isinstance(blocknum, (int, long))
assert isinstance(blocknum, int)
if self.mode == "bad block":
return flip_bit(self.blocks[blocknum])
return self.blocks[blocknum]

View File

@ -343,8 +343,7 @@ class FilePaths(ReallyEqualMixin, unittest.TestCase):
for fp in (nosep_fp, sep_fp):
self.failUnlessReallyEqual(fp, FilePath(foo_u))
if encodingutil.use_unicode_filepath:
self.failUnlessReallyEqual(fp.path, foo_u)
self.failUnlessReallyEqual(fp.path, foo_u)
if sys.platform == "win32":
long_u = u'\\\\?\\C:\\foo'
@ -360,8 +359,7 @@ class FilePaths(ReallyEqualMixin, unittest.TestCase):
for foo_fp in (foo_bfp, foo_ufp):
fp = extend_filepath(foo_fp, [u'bar', u'baz'])
self.failUnlessReallyEqual(fp, FilePath(foo_bar_baz_u))
if encodingutil.use_unicode_filepath:
self.failUnlessReallyEqual(fp.path, foo_bar_baz_u)
self.failUnlessReallyEqual(fp.path, foo_bar_baz_u)
def test_unicode_from_filepath(self):
foo_bfp = FilePath(win32_other(b'C:\\foo', b'/foo'))

View File

@ -4,9 +4,6 @@ Tests for allmydata.util.log.
Ported to Python 3.
"""
from future.utils import native_str
from twisted.trial import unittest
from twisted.python.failure import Failure
@ -161,4 +158,4 @@ class Log(unittest.TestCase):
obj.log(**{"my": "message"})
for message in self.messages:
for k in message[-1].keys():
self.assertIsInstance(k, native_str)
self.assertIsInstance(k, str)

View File

@ -5,7 +5,7 @@ Ported to Python 3.
"""
from __future__ import annotations
from future.utils import native_str, bytes_to_native_str, bchr
from future.utils import bchr
from six import ensure_str
from io import (
@ -109,7 +109,7 @@ class UtilTests(SyncTestCase):
path = storage_index_to_dir(s)
parts = os.path.split(path)
self.assertThat(parts[0], Equals(parts[1][:2]))
self.assertThat(path, IsInstance(native_str))
self.assertThat(path, IsInstance(str))
def test_get_share_file_mutable(self):
"""A mutable share is identified by get_share_file()."""
@ -1242,7 +1242,7 @@ class Server(AsyncTestCase):
reports = os.listdir(reportdir)
self.assertThat(reports, HasLength(2))
report_si1 = [r for r in reports if bytes_to_native_str(si1_s) in r][0]
report_si1 = [r for r in reports if si1_s.decode() in r][0]
f = open(os.path.join(reportdir, report_si1), "rb")
report = f.read()
f.close()
@ -1809,10 +1809,10 @@ class MutableServer(SyncTestCase):
self.assertThat(readv(b"si1", [], [(0,10)]),
Equals({}))
# and the bucket directory should now be gone
si = base32.b2a(b"si1")
si = base32.b2a(b"si1").decode()
# note: this is a detail of the storage server implementation, and
# may change in the future
si = bytes_to_native_str(si) # filesystem paths are native strings
# filesystem paths are native strings
prefix = si[:2]
prefixdir = os.path.join(self.workdir("test_remove"), "shares", prefix)
bucketdir = os.path.join(prefixdir, si)

View File

@ -3,7 +3,7 @@ Ported to Python 3.
"""
from __future__ import annotations
from past.builtins import chr as byteschr, long
from past.builtins import chr as byteschr
from six import ensure_text
import os, re, sys, time, json
@ -395,7 +395,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
# this is really bytes received rather than sent, but it's
# convenient and basically measures the same thing
bytes_sent = results.get_ciphertext_fetched()
self.failUnless(isinstance(bytes_sent, (int, long)), bytes_sent)
self.failUnless(isinstance(bytes_sent, int), bytes_sent)
# We currently don't support resumption of upload if the data is
# encrypted with a random key. (Because that would require us

View File

@ -32,10 +32,8 @@ from zope.interface import (
)
import attr
from attr.validators import (
optional,
provides,
)
from attr.validators import optional
from allmydata.util.attrs_provides import provides
from twisted.internet import reactor
from eliot import (
ILogger,

View File

@ -8,7 +8,6 @@ Once Python 2 support is dropped, most of this module will obsolete, since
Unicode is the default everywhere in Python 3.
"""
from past.builtins import unicode
from six import ensure_str
import sys, os, re
@ -53,8 +52,6 @@ def check_encoding(encoding):
io_encoding = "utf-8"
filesystem_encoding = None
is_unicode_platform = True
use_unicode_filepath = True
def _reload():
global filesystem_encoding
@ -82,13 +79,13 @@ def argv_to_unicode(s):
This is the inverse of ``unicode_to_argv``.
"""
if isinstance(s, unicode):
if isinstance(s, str):
return s
precondition(isinstance(s, bytes), s)
try:
return unicode(s, io_encoding)
return str(s, io_encoding)
except UnicodeDecodeError:
raise usage.UsageError("Argument %s cannot be decoded as %s." %
(quote_output(s), io_encoding))
@ -112,7 +109,7 @@ def unicode_to_argv(s):
On Python 2 on POSIX, this encodes using UTF-8. On Python 3 and on
Windows, this returns the input unmodified.
"""
precondition(isinstance(s, unicode), s)
precondition(isinstance(s, str), s)
warnings.warn("This is unnecessary.", DeprecationWarning)
if sys.platform == "win32":
return s
@ -166,7 +163,7 @@ def unicode_to_output(s):
On Python 3 just returns the unicode string unchanged, since encoding is
the responsibility of stdout/stderr, they expect Unicode by default.
"""
precondition(isinstance(s, unicode), s)
precondition(isinstance(s, str), s)
warnings.warn("This is unnecessary.", DeprecationWarning)
return s
@ -214,7 +211,7 @@ def quote_output_u(*args, **kwargs):
Like ``quote_output`` but always return ``unicode``.
"""
result = quote_output(*args, **kwargs)
if isinstance(result, unicode):
if isinstance(result, str):
return result
# Since we're quoting, the assumption is this will be read by a human, and
# therefore printed, so stdout's encoding is the plausible one. io_encoding
@ -239,7 +236,7 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
On Python 3, returns Unicode strings.
"""
precondition(isinstance(s, (bytes, unicode)), s)
precondition(isinstance(s, (bytes, str)), s)
# Since we're quoting, the assumption is this will be read by a human, and
# therefore printed, so stdout's encoding is the plausible one. io_encoding
# is now always utf-8.
@ -278,7 +275,7 @@ def quote_path(path, quotemarks=True):
return quote_output(b"/".join(map(to_bytes, path)), quotemarks=quotemarks, quote_newlines=True)
def quote_local_unicode_path(path, quotemarks=True):
precondition(isinstance(path, unicode), path)
precondition(isinstance(path, str), path)
if sys.platform == "win32" and path.startswith(u"\\\\?\\"):
path = path[4 :]
@ -298,20 +295,13 @@ def extend_filepath(fp, segments):
for segment in segments:
fp = fp.child(segment)
if isinstance(fp.path, unicode) and not use_unicode_filepath:
return FilePath(fp.path.encode(filesystem_encoding))
else:
return fp
return fp
def to_filepath(path):
precondition(isinstance(path, unicode if use_unicode_filepath else (bytes, unicode)),
path=path)
if isinstance(path, unicode) and not use_unicode_filepath:
path = path.encode(filesystem_encoding)
precondition(isinstance(path, str), path=path)
if sys.platform == "win32":
_assert(isinstance(path, unicode), path=path)
_assert(isinstance(path, str), path=path)
if path.startswith(u"\\\\?\\") and len(path) > 4:
# FilePath normally strips trailing path separators, but not in this case.
path = path.rstrip(u"\\")
@ -319,7 +309,7 @@ def to_filepath(path):
return FilePath(path)
def _decode(s):
precondition(isinstance(s, (bytes, unicode)), s=s)
precondition(isinstance(s, (bytes, str)), s=s)
if isinstance(s, bytes):
return s.decode(filesystem_encoding)
@ -340,7 +330,7 @@ def unicode_platform():
"""
Does the current platform handle Unicode filenames natively?
"""
return is_unicode_platform
return True
class FilenameEncodingError(Exception):
"""
@ -349,39 +339,13 @@ class FilenameEncodingError(Exception):
"""
pass
def listdir_unicode_fallback(path):
"""
This function emulates a fallback Unicode API similar to one available
under Windows or MacOS X.
If badly encoded filenames are encountered, an exception is raised.
"""
precondition(isinstance(path, unicode), path)
try:
byte_path = path.encode(filesystem_encoding)
except (UnicodeEncodeError, UnicodeDecodeError):
raise FilenameEncodingError(path)
try:
return [unicode(fn, filesystem_encoding) for fn in os.listdir(byte_path)]
except UnicodeDecodeError as e:
raise FilenameEncodingError(e.object)
def listdir_unicode(path):
    """
    Wrapper around listdir() which provides safe access to the convenient
    Unicode API even under platforms that don't provide one natively.

    :param path: The directory to list, as a native (unicode) string.

    :return: The directory entries, as returned by ``os.listdir``.
    """
    precondition(isinstance(path, str), path)
    # On Python 3, os.listdir() given a str path returns str entries on every
    # supported platform, so no byte-level fallback is needed.
    return os.listdir(path)
def listdir_filepath(fp):
    """
    List the directory named by the Twisted ``FilePath`` ``fp``, by converting
    it to a unicode path and delegating to ``listdir_unicode``.
    """
    unicode_path = unicode_from_filepath(fp)
    return listdir_unicode(unicode_path)

View File

@ -2,8 +2,6 @@
Utilities for getting IP addresses.
"""
from future.utils import native_str
from typing import Callable
import os, socket
@ -104,7 +102,7 @@ def get_local_addresses_sync():
on the local system.
"""
return list(
native_str(address["addr"])
str(address["addr"])
for iface_name
in interfaces()
for address

View File

@ -5,22 +5,29 @@ ISO-8601:
http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
from future.utils import native_str
import calendar, datetime, re, time
from typing import Optional
def format_time(t):
    """
    Render a time 9-tuple / ``struct_time`` ``t`` as ``YYYY-MM-DD HH:MM:SS``.
    """
    # Fixed, locale-independent layout: date, a space, then 24-hour time.
    layout = "%Y-%m-%d %H:%M:%S"
    return time.strftime(layout, t)
def iso_utc_date(
    now: Optional[float] = None,
    t=time.time
) -> str:
    """
    Return the UTC date of the POSIX timestamp ``now`` as ``YYYY-MM-DD``.

    :param now: The POSIX timestamp to render, or None to use the current
        time as reported by ``t``.
    :param t: A callable returning the current POSIX time; parameterized so
        tests can supply a fixed clock.
    """
    if now is None:
        now = t()
    # datetime.utcfromtimestamp() is deprecated since Python 3.12; build a
    # timezone-aware UTC datetime instead.  Slicing to 10 characters keeps
    # only the date, so the "+00:00" offset suffix never appears.
    return datetime.datetime.fromtimestamp(now, datetime.timezone.utc).isoformat()[:10]
def iso_utc(
    now: Optional[float] = None,
    sep: str = '_',
    t=time.time
) -> str:
    """
    Return the UTC date+time of the POSIX timestamp ``now`` in ISO-8601 form,
    e.g. ``1970-01-01_00:00:00``.

    :param now: The POSIX timestamp to render, or None to use the current
        time as reported by ``t``.
    :param sep: Single-character separator placed between the date and time
        parts, ``_`` by default.
    :param t: A callable returning the current POSIX time; parameterized so
        tests can supply a fixed clock.
    """
    if now is None:
        now = t()
    sep = str(sep)  # should already be a str
    # datetime.utcfromtimestamp() is deprecated since Python 3.12; build an
    # aware UTC datetime and then drop tzinfo so isoformat() emits no offset
    # suffix, matching the historical naive-datetime output exactly.
    when = datetime.datetime.fromtimestamp(now, datetime.timezone.utc)
    return when.replace(tzinfo=None).isoformat(sep)
def iso_utc_time_to_seconds(isotime, _conversion_re=re.compile(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})[T_ ](?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(?P<subsecond>\.\d+)?")):

View File

@ -129,7 +129,7 @@ deps =
types-mock
types-six
types-PyYAML
types-pkg_resources
types-setuptools
types-pyOpenSSL
foolscap
# Upgrade when new releases come out: