Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git, synced 2024-12-24 07:06:41 +00:00
remove more Python2: unicode -> str, long -> int
This commit is contained in: parent 53084f76ce, commit 0e5b6daa38
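The renames follow directly from Python 3's type model: str is the only text type and int has unbounded precision, so the past.builtins aliases unicode and long are no longer needed. A minimal sketch of the substitutions applied throughout the diff (illustrative only, not part of the commit):

# Python 3 only: `str` is the sole text type and `int` covers arbitrarily
# large integers, so `unicode` and `long` have nothing left to alias.
summary = b"healthy"
if isinstance(summary, bytes):        # bytes still need explicit decoding
    summary = str(summary, "utf-8")   # replaces unicode(summary, "utf-8")
assert isinstance(summary, str)       # replaces isinstance(summary, unicode)

size = 2 ** 64                        # would have been a `long` on Python 2
assert isinstance(size, int)          # replaces isinstance(size, (int, long))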
src/allmydata
@@ -1,8 +1,6 @@
 """Ported to Python 3.
 """
 
-from past.builtins import unicode
-
 from zope.interface import implementer
 from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
     IDeepCheckResults, IDeepCheckAndRepairResults, IURI, IDisplayableServer
@@ -64,7 +62,7 @@ class CheckResults(object):
         # unicode.
         if isinstance(summary, bytes):
-            summary = unicode(summary, "utf-8")
-        assert isinstance(summary, unicode) # should be a single string
+            summary = str(summary, "utf-8")
+        assert isinstance(summary, str) # should be a single string
         self._summary = summary
         assert not isinstance(report, str) # should be list of strings
         self._report = report
@@ -5,7 +5,6 @@ Ported to Python 3.
 from __future__ import annotations
 
 from future.utils import native_str
-from past.builtins import long, unicode
 from six import ensure_str
 
 import os, time, weakref, itertools
@@ -57,7 +56,7 @@ from eliot import (
 
 _TOTAL_SHARES = Field.for_types(
     u"total_shares",
-    [int, long],
+    [int, int],
     u"The total number of shares desired.",
 )
 
@@ -104,7 +103,7 @@ _HAPPINESS_MAPPINGS = Field(
 
 _HAPPINESS = Field.for_types(
     u"happiness",
-    [int, long],
+    [int, int],
     u"The computed happiness of a certain placement.",
 )
 
@@ -142,7 +141,7 @@ GET_SHARE_PLACEMENTS = MessageType(
 
 _EFFECTIVE_HAPPINESS = Field.for_types(
     u"effective_happiness",
-    [int, long],
+    [int, int],
     u"The computed happiness value of a share placement map.",
 )
 
@@ -1622,7 +1621,7 @@ class AssistedUploader(object):
         # abbreviated), so if we detect old results, just clobber them.
 
         sharemap = upload_results.sharemap
-        if any(isinstance(v, (bytes, unicode)) for v in sharemap.values()):
+        if any(isinstance(v, (bytes, str)) for v in sharemap.values()):
             upload_results.sharemap = None
 
     def _build_verifycap(self, helper_upload_results):
@@ -1701,7 +1700,7 @@ class BaseUploadable(object):
     def set_default_encoding_parameters(self, default_params):
         assert isinstance(default_params, dict)
         for k,v in default_params.items():
-            precondition(isinstance(k, (bytes, unicode)), k, v)
+            precondition(isinstance(k, (bytes, str)), k, v)
             precondition(isinstance(v, int), k, v)
         if "k" in default_params:
             self.default_encoding_param_k = default_params["k"]
@@ -24,13 +24,12 @@ def print(*args, **kwargs):
     encoding error handler and then write the result whereas builtin print
     uses the "strict" encoding error handler.
     """
-    from past.builtins import unicode
     out = kwargs.pop("file", None)
     if out is None:
         out = _sys_stdout
     encoding = out.encoding or "ascii"
     def ensafe(o):
-        if isinstance(o, unicode):
+        if isinstance(o, str):
             return o.encode(encoding, errors="replace").decode(encoding)
         return o
     return _print(
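For context, the ensafe helper above rewrites text so it survives a stream whose encoding cannot represent every character. A standalone sketch of the same idea (simplified: the encoding is a parameter here rather than taken from the output stream):

def ensafe(o, encoding="ascii"):
    # Encode with errors="replace" and decode again: characters the target
    # encoding cannot express become "?" instead of raising UnicodeEncodeError.
    if isinstance(o, str):
        return o.encode(encoding, errors="replace").decode(encoding)
    return o

print(ensafe("na\u00efve"))  # -> "na?ve": replacement happens before writing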
@@ -7,8 +7,6 @@ Methods ending in to_string() are actually to_bytes(), possibly should be fixed
 in follow-up port.
 """
 
-from past.builtins import unicode, long
-
 import re
 from typing import Type
 
@@ -91,7 +89,7 @@ class CHKFileURI(_BaseURI):
     def to_string(self):
         assert isinstance(self.needed_shares, int)
         assert isinstance(self.total_shares, int)
-        assert isinstance(self.size, (int,long))
+        assert isinstance(self.size, int)
 
         return (b'URI:CHK:%s:%s:%d:%d:%d' %
                 (base32.b2a(self.key),
@@ -147,7 +145,7 @@ class CHKFileVerifierURI(_BaseURI):
     def to_string(self):
         assert isinstance(self.needed_shares, int)
         assert isinstance(self.total_shares, int)
-        assert isinstance(self.size, (int,long))
+        assert isinstance(self.size, int)
 
         return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' %
                 (si_b2a(self.storage_index),
@@ -742,7 +740,7 @@ ALLEGED_IMMUTABLE_PREFIX = b'imm.'
 
 def from_string(u, deep_immutable=False, name=u"<unknown name>"):
     """Create URI from either unicode or byte string."""
-    if isinstance(u, unicode):
+    if isinstance(u, str):
         u = u.encode("utf-8")
     if not isinstance(u, bytes):
         raise TypeError("URI must be unicode string or bytes: %r" % (u,))
@@ -844,7 +842,7 @@ def is_uri(s):
         return False
 
 def is_literal_file_uri(s):
-    if isinstance(s, unicode):
+    if isinstance(s, str):
         s = s.encode("utf-8")
     if not isinstance(s, bytes):
         return False
@@ -853,7 +851,7 @@ def is_literal_file_uri(s):
            s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:'))
 
 def has_uri_prefix(s):
-    if isinstance(s, unicode):
+    if isinstance(s, str):
         s = s.encode("utf-8")
     if not isinstance(s, bytes):
         return False
@@ -895,9 +893,9 @@ def pack_extension(data):
     pieces = []
     for k in sorted(data.keys()):
         value = data[k]
-        if isinstance(value, (int, long)):
+        if isinstance(value, int):
            value = b"%d" % value
-        if isinstance(k, unicode):
+        if isinstance(k, str):
            k = k.encode("utf-8")
        assert isinstance(value, bytes), k
        assert re.match(br'^[a-zA-Z_\-]+$', k)
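All of the uri.py changes share one normalization idiom: accept either text or bytes, encode text to UTF-8, and reject everything else. A standalone sketch of that idiom (hypothetical helper name, not part of the module):

def to_uri_bytes(u):
    # Text is encoded to UTF-8 bytes, bytes pass through unchanged, anything
    # else is rejected, mirroring the checks in from_string(),
    # is_literal_file_uri(), and has_uri_prefix() above.
    if isinstance(u, str):
        u = u.encode("utf-8")
    if not isinstance(u, bytes):
        raise TypeError("URI must be unicode string or bytes: %r" % (u,))
    return u

assert to_uri_bytes("URI:LIT:") == b"URI:LIT:"
assert to_uri_bytes(b"URI:LIT:") == b"URI:LIT:"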