Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Merge remote-tracking branch 'origin/master' into 3645.gbs-expanded-motivation
commit 2de90c4bfe
.circleci/config.yml
@@ -89,6 +89,9 @@ workflows:
       - "typechecks":
           <<: *DOCKERHUB_CONTEXT

+      - "docs":
+          <<: *DOCKERHUB_CONTEXT
+
   images:
     # Build the Docker images used by the ci jobs. This makes the ci jobs
     # faster and takes various spurious failures out of the critical path.
@@ -458,6 +461,18 @@ jobs:
       command: |
         /tmp/venv/bin/tox -e typechecks

+  docs:
+    docker:
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/ubuntu:18.04-py3"
+
+    steps:
+      - "checkout"
+      - run:
+          name: "Build documentation"
+          command: |
+            /tmp/venv/bin/tox -e docs
+
   build-image: &BUILD_IMAGE
     # This is a template for a job to build a Docker image that has as much of
     # the setup as we can manage already done and baked in. This cuts down on
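The new ``docs`` job mirrors the existing ``typechecks`` job: check out the tree, then run the matching tox environment. The same check can presumably be reproduced locally with ``tox -e docs``, which is the command the CI invocation above wraps.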
22 .lgtm.yml (deleted)
@@ -1,22 +0,0 @@
-extraction:
-  python:
-    after_prepare:
-      - |
-        # https://discuss.lgtm.com/t/determination-of-python-requirements/974/4
-        sed -i 's/\("pyOpenSSL\)/\# Dependency removed for lgtm (see .lgtm.yml): \1/g' src/allmydata/_auto_deps.py
-
-queries:
-  # This generates spurious errors for calls by interface because of the
-  # zope.interface choice to exclude self from method signatures. So, turn it
-  # off.
-  - exclude: "py/call/wrong-arguments"
-
-  # The premise of this query is broken. The errors it produces are nonsense.
-  # There is no such thing as a "procedure" in Python and "None" is not
-  # meaningless.
-  - exclude: "py/procedure-return-value-used"
-
-  # It is true that this query identifies things which are sometimes mistakes.
-  # However, it also identifies things which are entirely valid. Therefore,
-  # it produces noisy results.
-  - exclude: "py/implicit-string-concatenation-in-list"
docs/conf.py
@@ -28,7 +28,7 @@ import os
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-extensions = ['recommonmark']
+extensions = ['recommonmark', 'sphinx_rtd_theme']

 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
@@ -107,7 +107,7 @@ todo_include_todos = False

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'default'
+html_theme = 'sphinx_rtd_theme'

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
docs/proposed/http-storage-node-protocol.rst
@@ -282,6 +282,19 @@ Because of the simple types used throughout
 and the equivalence described in `RFC 7049`_
 these examples should be representative regardless of which of these two encodings is chosen.

+HTTP Design
+~~~~~~~~~~~
+
+The HTTP interface described here is informed by the ideas of REST
+(Representational State Transfer).
+For ``GET`` requests query parameters are preferred over values encoded in the request body.
+For other requests query parameters are encoded into the message body.
+
+Many branches of the resource tree are conceived as homogenous containers:
+one branch contains all of the share data;
+another branch contains all of the lease data;
+etc.
+
 General
 ~~~~~~~
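To make the stated convention concrete, here is an illustrative sketch using the Python ``requests`` library; the URLs and parameter names are invented for the example and are not taken from the specification:

    import requests  # illustrative HTTP client, not part of the spec

    # GET: parameters are carried in the query string, not the body.
    requests.get(
        "http://storage.example/v1/some-container",  # hypothetical URL
        params={"some-parameter": "value"},
    )

    # Non-GET: the same parameters move into the message body.
    requests.post(
        "http://storage.example/v1/some-container",  # hypothetical URL
        json={"some-parameter": "value"},
    )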
@@ -325,6 +338,9 @@ The lease expires after 31 days.
 Discussion
 ``````````

+We considered an alternative where ``renew-secret`` and ``cancel-secret`` are placed in query arguments on the request path.
+We chose to put these values into the request body to make the URL simpler.
+
 Several behaviors here are blindly copied from the Foolscap-based storage server protocol.

 * There is a cancel secret but there is no API to use it to cancel a lease.
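For concreteness, the rejected and chosen shapes of a lease operation might look like this (a sketch only; the ``/v1/lease/...`` path and the secret values are invented for illustration):

    import requests

    storage_index = "abcd"  # placeholder

    # Rejected alternative: secrets as query arguments on the request path.
    requests.put(
        "http://storage.example/v1/lease/" + storage_index,
        params={"renew-secret": "aaaa", "cancel-secret": "bbbb"},
    )

    # Chosen design: secrets in the request body, keeping the URL simple.
    requests.put(
        "http://storage.example/v1/lease/" + storage_index,
        json={"renew-secret": "aaaa", "cancel-secret": "bbbb"},
    )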
0 newsfragments/3631.minor (new empty file)
0 newsfragments/3632.minor (new empty file)
0 newsfragments/3634.minor (new empty file)
0 newsfragments/3638.minor (new empty file)
0 newsfragments/3640.minor (new empty file)
0 newsfragments/3646.minor (new empty file)
0 newsfragments/3647.minor (new empty file)
0 newsfragments/3649.minor (new empty file)
1 newsfragments/3650.bugfix (new file)
@@ -0,0 +1 @@
+``tahoe invite`` will now read share encoding/placement configuration values from a Tahoe client node configuration file if they are not given on the command line, instead of raising an unhandled exception.
0 newsfragments/3654.minor (new empty file)
src/allmydata/scripts/backupdb.py
@@ -303,8 +303,8 @@ class BackupDB_v2(object):
         for name in contents:
             entries.append( [name.encode("utf-8"), contents[name]] )
         entries.sort()
-        data = "".join([netstring(name_utf8)+netstring(cap)
-                        for (name_utf8,cap) in entries])
+        data = b"".join([netstring(name_utf8)+netstring(cap)
+                         for (name_utf8,cap) in entries])
         dirhash = backupdb_dirhash(data)
         dirhash_s = base32.b2a(dirhash)
         c = self.cursor
src/allmydata/scripts/common.py
@@ -229,19 +229,19 @@ def get_alias(aliases, path_unicode, default):
     precondition(isinstance(path_unicode, str), path_unicode)

     from allmydata import uri
-    path = path_unicode.encode('utf-8').strip(" ")
+    path = path_unicode.encode('utf-8').strip(b" ")
     if uri.has_uri_prefix(path):
         # We used to require "URI:blah:./foo" in order to get a subpath,
         # stripping out the ":./" sequence. We still allow that for compatibility,
         # but now also allow just "URI:blah/foo".
-        sep = path.find(":./")
+        sep = path.find(b":./")
         if sep != -1:
             return path[:sep], path[sep+3:]
-        sep = path.find("/")
+        sep = path.find(b"/")
         if sep != -1:
             return path[:sep], path[sep+1:]
-        return path, ""
-    colon = path.find(":")
+        return path, b""
+    colon = path.find(b":")
     if colon == -1:
         # no alias
         if default == None:
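The compatibility comment above allows two spellings of the same alias-plus-subpath; a quick sketch of how the byte-oriented parsing now behaves (values invented for illustration):

    path = b"URI:DIR2:abc:./sub/dir"   # legacy ":./" separator
    sep = path.find(b":./")
    assert (path[:sep], path[sep + 3:]) == (b"URI:DIR2:abc", b"sub/dir")

    path = b"URI:DIR2:abc/sub/dir"     # newer plain "/" form
    sep = path.find(b"/")
    assert (path[:sep], path[sep + 1:]) == (b"URI:DIR2:abc", b"sub/dir")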
src/allmydata/scripts/common_http.py
@@ -1,7 +1,7 @@
 from __future__ import print_function

 import os
-from six.moves import cStringIO as StringIO
+from io import BytesIO
 from six.moves import urllib, http_client
 import six
 import allmydata # for __full_version__
@@ -38,9 +38,9 @@ class BadResponse(object):
         return ""


-def do_http(method, url, body=""):
-    if isinstance(body, str):
-        body = StringIO(body)
+def do_http(method, url, body=b""):
+    if isinstance(body, bytes):
+        body = BytesIO(body)
     elif isinstance(body, six.text_type):
         raise TypeError("do_http body must be a bytestring, not unicode")
     else:
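After this change the contract is: callers pass bytes (wrapped in a ``BytesIO``), and text raises ``TypeError``. A minimal sketch of the new calling convention:

    resp = do_http("GET", url)                     # default body is now b""
    resp = do_http("POST", url, body=b"payload")   # bytes are wrapped in BytesIO
    # do_http("POST", url, body=u"payload")        # would raise TypeError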
src/allmydata/scripts/create_node.py
@@ -11,10 +11,13 @@ if PY2:

 import io
 import os
-import json

 try:
-    from allmydata.scripts.types_ import SubCommands
+    from allmydata.scripts.types_ import (
+        SubCommands,
+        Parameters,
+        Flags,
+    )
 except ImportError:
     pass

@@ -32,7 +35,7 @@ from allmydata.scripts.common import (
 from allmydata.scripts.default_nodedir import _default_nodedir
 from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import listdir_unicode, argv_to_unicode, quote_local_unicode_path, get_io_encoding
-from allmydata.util import fileutil, i2p_provider, iputil, tor_provider
+from allmydata.util import fileutil, i2p_provider, iputil, tor_provider, jsonbytes as json

 from wormhole import wormhole

@@ -57,29 +60,29 @@ WHERE_OPTS = [
      "Hostname to automatically set --location/--port when --listen=tcp"),
     ("listen", None, "tcp",
      "Comma-separated list of listener types (tcp,tor,i2p,none)."),
-]
+] # type: Parameters

 TOR_OPTS = [
     ("tor-control-port", None, None,
      "Tor's control port endpoint descriptor string (e.g. tcp:127.0.0.1:9051 or unix:/var/run/tor/control)"),
     ("tor-executable", None, None,
      "The 'tor' executable to run (default is to search $PATH)."),
-]
+] # type: Parameters

 TOR_FLAGS = [
     ("tor-launch", None, "Launch a tor instead of connecting to a tor control port."),
-]
+] # type: Flags

 I2P_OPTS = [
     ("i2p-sam-port", None, None,
      "I2P's SAM API port endpoint descriptor string (e.g. tcp:127.0.0.1:7656)"),
     ("i2p-executable", None, None,
      "(future) The 'i2prouter' executable to run (default is to search $PATH)."),
-]
+] # type: Parameters

 I2P_FLAGS = [
     ("i2p-launch", None, "(future) Launch an I2P router instead of connecting to a SAM API port."),
-]
+] # type: Flags

 def validate_where_options(o):
     if o['listen'] == "none":
@@ -185,7 +188,7 @@ class CreateClientOptions(_CreateBaseOptions):
         ("shares-happy", None, 7, "How many servers new files must be placed on."),
         ("shares-total", None, 10, "Total shares required for uploaded files."),
         ("join", None, None, "Join a grid with the given Invite Code."),
-        ]
+        ] # type: Parameters

     # This is overridden in order to ensure we get a "Wrong number of
     # arguments." error when more than one argument is given.
@@ -389,7 +392,7 @@ def _get_config_via_wormhole(config):
             "client-v1": {},
         }
     }
-    wh.send_message(json.dumps(intro))
+    wh.send_message(json.dumps_bytes(intro))

     server_intro = yield wh.get_message()
     server_intro = json.loads(server_intro)
src/allmydata/scripts/tahoe_add_alias.py
@@ -1,9 +1,10 @@
 from __future__ import print_function
 from __future__ import unicode_literals

+from past.builtins import unicode
+
 import os.path
 import codecs
-import json

 from allmydata.util.assertutil import precondition
@@ -12,6 +13,7 @@ from allmydata.scripts.common_http import do_http, check_http_error
 from allmydata.scripts.common import get_aliases
 from allmydata.util.fileutil import move_into_place
 from allmydata.util.encodingutil import quote_output, quote_output_u
+from allmydata.util import jsonbytes as json


 def add_line_to_aliasfile(aliasfile, alias, cap):
@@ -52,7 +54,7 @@ def add_alias(options):
         show_output(stderr, "Alias {alias} already exists!", alias=alias)
         return 1
     aliasfile = os.path.join(nodedir, "private", "aliases")
-    cap = uri.from_string_dirnode(cap).to_string()
+    cap = unicode(uri.from_string_dirnode(cap).to_string(), 'utf-8')

     add_line_to_aliasfile(aliasfile, alias, cap)
     show_output(stdout, "Alias {alias} added", alias=alias)
@@ -92,7 +94,7 @@ def create_alias(options):

     # probably check for others..

-    add_line_to_aliasfile(aliasfile, alias, new_uri)
+    add_line_to_aliasfile(aliasfile, alias, unicode(new_uri, "utf-8"))

     show_output(stdout, "Alias {alias} created", alias=alias)
     return 0
@@ -167,7 +169,10 @@ def list_aliases(options):
     data = _get_alias_details(options['node-directory'])

     if options['json']:
-        output = _escape_format(json.dumps(data, indent=4).decode("ascii"))
+        dumped = json.dumps(data, indent=4)
+        if isinstance(dumped, bytes):
+            dumped = dumped.decode("utf-8")
+        output = _escape_format(dumped)
     else:
         def dircap(details):
             return (
src/allmydata/scripts/tahoe_check.py
@@ -122,7 +122,7 @@ class FakeTransport(object):
     disconnecting = False

 class DeepCheckOutput(LineOnlyReceiver, object):
-    delimiter = "\n"
+    delimiter = b"\n"
     def __init__(self, streamer, options):
         self.streamer = streamer
         self.transport = FakeTransport()
@@ -181,7 +181,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
               % (self.num_objects, self.files_healthy, self.files_unhealthy), file=stdout)

 class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
-    delimiter = "\n"
+    delimiter = b"\n"
     def __init__(self, streamer, options):
         self.streamer = streamer
         self.transport = FakeTransport()
src/allmydata/scripts/tahoe_invite.py
@@ -1,7 +1,5 @@
 from __future__ import print_function

-import json
-
 try:
     from allmydata.scripts.types_ import SubCommands
 except ImportError:
@@ -13,8 +11,9 @@ from twisted.internet import defer, reactor
 from wormhole import wormhole

 from allmydata.util.encodingutil import argv_to_abspath
+from allmydata.util import jsonbytes as json
 from allmydata.scripts.common import get_default_nodedir, get_introducer_furl
-from allmydata.node import read_config
+from allmydata.client import read_config


 class InviteOptions(usage.Options):
@@ -54,7 +53,7 @@ def _send_config_via_wormhole(options, config):
     code = yield wh.get_code()
     print("Invite Code for client: {}".format(code), file=out)

-    wh.send_message(json.dumps({
+    wh.send_message(json.dumps_bytes({
         u"abilities": {
             u"server-v1": {},
         }
@@ -71,7 +70,7 @@ def _send_config_via_wormhole(options, config):
         defer.returnValue(1)

     print("  transmitting configuration", file=out)
-    wh.send_message(json.dumps(config))
+    wh.send_message(json.dumps_bytes(config))
     yield wh.close()


@@ -94,9 +93,9 @@ def invite(options):
     nick = options['nick']

     remote_config = {
-        "shares-needed": options["shares-needed"] or config.get('client', 'shares.needed'),
-        "shares-total": options["shares-total"] or config.get('client', 'shares.total'),
-        "shares-happy": options["shares-happy"] or config.get('client', 'shares.happy'),
+        "shares-needed": options["shares-needed"] or config.get_config('client', 'shares.needed'),
+        "shares-total": options["shares-total"] or config.get_config('client', 'shares.total'),
+        "shares-happy": options["shares-happy"] or config.get_config('client', 'shares.happy'),
         "nickname": nick,
         "introducer": introducer_furl,
     }
src/allmydata/scripts/tahoe_manifest.py
@@ -13,7 +13,7 @@ class FakeTransport(object):
     disconnecting = False

 class ManifestStreamer(LineOnlyReceiver, object):
-    delimiter = "\n"
+    delimiter = b"\n"

     def __init__(self):
         self.transport = FakeTransport()
src/allmydata/scripts/tahoe_status.py
@@ -1,7 +1,9 @@
 from __future__ import print_function

+from future.builtins import chr
+
 import os
-import urllib
+from urllib.parse import urlencode, quote as url_quote

 import json

@@ -25,12 +27,12 @@ def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
         if method == 'POST':
             if post_args is None:
                 raise ValueError("Must pass post_args= for POST method")
-            body = urllib.urlencode(post_args)
+            body = urlencode(post_args)
         else:
             body = ''
             if post_args is not None:
                 raise ValueError("post_args= only valid for POST method")
-    resp = do_http(method, url, body=body)
+    resp = do_http(method, url, body=body.encode("utf-8"))
     if isinstance(resp, BadResponse):
         # specifically NOT using format_http_error() here because the
         # URL is pretty sensitive (we're doing /uri/<key>).
@@ -48,7 +50,7 @@ def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
 def _get_json_for_cap(options, cap):
     return _get_json_for_fragment(
         options,
-        'uri/%s?t=json' % urllib.quote(cap),
+        'uri/%s?t=json' % url_quote(cap),
     )

 def pretty_progress(percent, size=10, ascii=False):
@@ -74,8 +76,8 @@ def pretty_progress(percent, size=10, ascii=False):

     # unicode 0x2581 -> 2589 are vertical bar chunks, like rainbarf uses
     # and following are narrow -> wider bars
-    part = unichr(0x258f - part) # for smooth bar
-    # part = unichr(0x2581 + part) # for neater-looking thing
+    part = chr(0x258f - part) # for smooth bar
+    # part = chr(0x2581 + part) # for neater-looking thing

     # hack for 100+ full so we don't print extra really-narrow/high bar
     if percent >= 100.0:
src/allmydata/scripts/types_.py
@@ -5,14 +5,16 @@ Type definitions used by modules in this package.
 # Python 3 only

 from typing import List, Tuple, Type, Sequence, Any
-from allmydata.scripts.common import BaseOptions
+from twisted.python.usage import Options


 # Historically, subcommands were implemented as lists, but due to a
 # [designed constraint in mypy](https://stackoverflow.com/a/52559625/70170),
 # a Tuple is required.
-SubCommand = Tuple[str, None, Type[BaseOptions], str]
+SubCommand = Tuple[str, None, Type[Options], str]

 SubCommands = List[SubCommand]

+Parameters = List[Sequence[Any]]
+
+Flags = List[Tuple[str, None, str]]
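These aliases are what the ``# type: Parameters`` and ``# type: Flags`` comments added elsewhere in this diff refer to. A minimal self-contained sketch, with option tuples borrowed from the ``create_node.py`` hunks above:

    from typing import Any, List, Sequence, Tuple

    Parameters = List[Sequence[Any]]
    Flags = List[Tuple[str, None, str]]

    # Comment-style annotations keep the module importable on Python 2
    # while still giving mypy something to check.
    TOR_OPTS = [
        ("tor-control-port", None, None,
         "Tor's control port endpoint descriptor string"),
    ]  # type: Parameters

    TOR_FLAGS = [
        ("tor-launch", None, "Launch a tor instead of connecting to a tor control port."),
    ]  # type: Flags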
src/allmydata/test/cli/common.py
@@ -55,5 +55,5 @@ class CLITestMixin(ReallyEqualMixin):
         verb = ensure_str(verb)
         args = [ensure_str(arg) for arg in args]
         client_dir = ensure_str(self.get_clientdir(i=client_num))
-        nodeargs = [ b"--node-directory", client_dir ]
+        nodeargs = [ "--node-directory", client_dir ]
         return run_cli(verb, *args, nodeargs=nodeargs, **kwargs)
src/allmydata/test/cli/test_alias.py
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import json

 from twisted.trial import unittest
@@ -59,7 +71,7 @@ class ListAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
         # the node filesystem state.
         aliases = get_aliases(self.get_clientdir())
         self.assertIn(alias, aliases)
-        self.assertTrue(aliases[alias].startswith(u"URI:DIR2:"))
+        self.assertTrue(aliases[alias].startswith(b"URI:DIR2:"))

         # And inspect the state via the user interface list-aliases command
         # too.
src/allmydata/test/cli/test_backupdb.py
@@ -1,4 +1,15 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import print_function
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Don't import future bytes so we don't break a couple of tests
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401

 import sys
 import os.path, time
@@ -8,7 +19,7 @@ from twisted.trial import unittest

 from allmydata.util import fileutil
 from allmydata.util.encodingutil import listdir_unicode
 from allmydata.scripts import backupdb
-from .common_util import skip_if_cannot_represent_filename
+from ..common_util import skip_if_cannot_represent_filename

 class BackupDB(unittest.TestCase):
     def create(self, dbfile):
@@ -70,7 +81,7 @@ class BackupDB(unittest.TestCase):


     def writeto(self, filename, data):
-        fn = os.path.join(self.basedir, unicode(filename))
+        fn = os.path.join(self.basedir, filename)
         parentdir = os.path.dirname(fn)
         fileutil.make_dirs(parentdir)
         fileutil.write(fn, data)
@@ -87,15 +98,15 @@ class BackupDB(unittest.TestCase):

         r = bdb.check_file(foo_fn)
         self.failUnlessEqual(r.was_uploaded(), False)
-        r.did_upload("foo-cap")
+        r.did_upload(b"foo-cap")

         r = bdb.check_file(blah_fn)
         self.failUnlessEqual(r.was_uploaded(), False)
         r.did_upload("blah-cap")

         r = bdb.check_file(foo_fn)
-        self.failUnlessEqual(r.was_uploaded(), "foo-cap")
-        self.failUnlessEqual(type(r.was_uploaded()), str)
+        self.failUnlessEqual(r.was_uploaded(), b"foo-cap")
+        self.failUnlessEqual(type(r.was_uploaded()), bytes)
         self.failUnlessEqual(r.should_check(), False)

         time.sleep(1.0) # make sure the timestamp changes
@@ -103,28 +114,28 @@ class BackupDB(unittest.TestCase):

         r = bdb.check_file(foo_fn)
         self.failUnlessEqual(r.was_uploaded(), False)
-        r.did_upload("new-cap")
+        r.did_upload(b"new-cap")

         r = bdb.check_file(foo_fn)
-        self.failUnlessEqual(r.was_uploaded(), "new-cap")
+        self.failUnlessEqual(r.was_uploaded(), b"new-cap")
         self.failUnlessEqual(r.should_check(), False)
         # if we spontaneously decide to upload it anyways, nothing should
         # break
-        r.did_upload("new-cap")
+        r.did_upload(b"new-cap")

         r = bdb.check_file(foo_fn, use_timestamps=False)
         self.failUnlessEqual(r.was_uploaded(), False)
-        r.did_upload("new-cap")
+        r.did_upload(b"new-cap")

         r = bdb.check_file(foo_fn)
-        self.failUnlessEqual(r.was_uploaded(), "new-cap")
+        self.failUnlessEqual(r.was_uploaded(), b"new-cap")
         self.failUnlessEqual(r.should_check(), False)

         bdb.NO_CHECK_BEFORE = 0
         bdb.ALWAYS_CHECK_AFTER = 0.1

         r = bdb.check_file(blah_fn)
-        self.failUnlessEqual(r.was_uploaded(), "blah-cap")
+        self.failUnlessEqual(r.was_uploaded(), b"blah-cap")
         self.failUnlessEqual(r.should_check(), True)
         r.did_check_healthy("results") # we know they're ignored for now

@@ -132,7 +143,7 @@ class BackupDB(unittest.TestCase):
         bdb.ALWAYS_CHECK_AFTER = 400

         r = bdb.check_file(blah_fn)
-        self.failUnlessEqual(r.was_uploaded(), "blah-cap")
+        self.failUnlessEqual(r.was_uploaded(), b"blah-cap")
         self.failUnlessEqual(r.should_check(), False)

         os.unlink(os.path.join(basedir, "foo.txt"))
@@ -165,13 +176,13 @@ class BackupDB(unittest.TestCase):
         dbfile = os.path.join(basedir, "dbfile")
         bdb = self.create(dbfile)

-        contents = {u"file1": "URI:CHK:blah1",
-                    u"file2": "URI:CHK:blah2",
-                    u"dir1": "URI:DIR2-CHK:baz2"}
+        contents = {u"file1": b"URI:CHK:blah1",
+                    u"file2": b"URI:CHK:blah2",
+                    u"dir1": b"URI:DIR2-CHK:baz2"}
         r = bdb.check_directory(contents)
         self.failUnless(isinstance(r, backupdb.DirectoryResult))
         self.failIf(r.was_created())
-        dircap = "URI:DIR2-CHK:foo1"
+        dircap = b"URI:DIR2-CHK:foo1"
         r.did_create(dircap)

         r = bdb.check_directory(contents)
@@ -185,7 +196,7 @@ class BackupDB(unittest.TestCase):
         r = bdb.check_directory(contents)
         self.failUnless(r.was_created())
         self.failUnlessEqual(r.was_created(), dircap)
-        self.failUnlessEqual(type(r.was_created()), str)
+        self.failUnlessEqual(type(r.was_created()), bytes)
         self.failUnlessEqual(r.should_check(), False)

         bdb.NO_CHECK_BEFORE = 0
@@ -207,14 +218,14 @@ class BackupDB(unittest.TestCase):
         self.failUnlessEqual(r.should_check(), False)


-        contents2 = {u"file1": "URI:CHK:blah1",
-                     u"dir1": "URI:DIR2-CHK:baz2"}
+        contents2 = {u"file1": b"URI:CHK:blah1",
+                     u"dir1": b"URI:DIR2-CHK:baz2"}
         r = bdb.check_directory(contents2)
         self.failIf(r.was_created())

-        contents3 = {u"file1": "URI:CHK:blah1",
-                     u"file2": "URI:CHK:blah3",
-                     u"dir1": "URI:DIR2-CHK:baz2"}
+        contents3 = {u"file1": b"URI:CHK:blah1",
+                     u"file2": b"URI:CHK:blah3",
+                     u"dir1": b"URI:DIR2-CHK:baz2"}
         r = bdb.check_directory(contents3)
         self.failIf(r.was_created())

@@ -228,17 +239,17 @@ class BackupDB(unittest.TestCase):
         bdb = self.create(dbfile)

         self.writeto(u"f\u00f6\u00f6.txt", "foo.txt")
-        files = [fn for fn in listdir_unicode(unicode(basedir)) if fn.endswith(".txt")]
+        files = [fn for fn in listdir_unicode(str(basedir)) if fn.endswith(".txt")]
         self.failUnlessEqual(len(files), 1)
         foo_fn = os.path.join(basedir, files[0])
         #print(foo_fn, type(foo_fn))

         r = bdb.check_file(foo_fn)
         self.failUnlessEqual(r.was_uploaded(), False)
-        r.did_upload("foo-cap")
+        r.did_upload(b"foo-cap")

         r = bdb.check_file(foo_fn)
-        self.failUnlessEqual(r.was_uploaded(), "foo-cap")
+        self.failUnlessEqual(r.was_uploaded(), b"foo-cap")
         self.failUnlessEqual(r.should_check(), False)

         bar_fn = self.writeto(u"b\u00e5r.txt", "bar.txt")
@@ -246,9 +257,9 @@ class BackupDB(unittest.TestCase):

         r = bdb.check_file(bar_fn)
         self.failUnlessEqual(r.was_uploaded(), False)
-        r.did_upload("bar-cap")
+        r.did_upload(b"bar-cap")

         r = bdb.check_file(bar_fn)
-        self.failUnlessEqual(r.was_uploaded(), "bar-cap")
+        self.failUnlessEqual(r.was_uploaded(), b"bar-cap")
         self.failUnlessEqual(r.should_check(), False)
src/allmydata/test/cli/test_invite.py
@@ -1,8 +1,25 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
 import mock
 import json
 from os.path import join

+try:
+    from typing import Optional, Sequence
+except ImportError:
+    pass
+
 from twisted.trial import unittest
 from twisted.internet import defer
 from ..common_util import run_cli
@@ -16,6 +33,8 @@ class _FakeWormhole(object):

     def __init__(self, outgoing_messages):
         self.messages = []
+        for o in outgoing_messages:
+            assert isinstance(o, bytes)
         self._outgoing = outgoing_messages

     def get_code(self):
@@ -26,15 +45,16 @@ class _FakeWormhole(object):

     def get_welcome(self):
         return defer.succeed(
-            json.dumps({
+            {
                 u"welcome": {},
-            })
+            }
         )

     def allocate_code(self):
         return None

     def send_message(self, msg):
+        assert isinstance(msg, bytes)
         self.messages.append(msg)

     def get_message(self):
@@ -45,6 +65,10 @@ class _FakeWormhole(object):


 def _create_fake_wormhole(outgoing_messages):
+    outgoing_messages = [
+        m.encode("utf-8") if isinstance(m, str) else m
+        for m in outgoing_messages
+    ]
     return _FakeWormhole(outgoing_messages)


@@ -144,17 +168,27 @@ class Invite(GridTestMixin, CLITestMixin, unittest.TestCase):
             intro_dir,
         )

-    @defer.inlineCallbacks
-    def test_invite_success(self):
+    def _invite_success(self, extra_args=(), tahoe_config=None):
+        # type: (Sequence[bytes], Optional[bytes]) -> defer.Deferred
         """
-        successfully send an invite
+        Exercise an expected-success case of ``tahoe invite``.
+
+        :param extra_args: Positional arguments to pass to ``tahoe invite``
+            before the nickname.
+
+        :param tahoe_config: If given, bytes to write to the node's
+            ``tahoe.cfg`` before running ``tahoe invite``.
         """
         intro_dir = os.path.join(self.basedir, "introducer")
         # we've never run the introducer, so it hasn't created
         # introducer.furl yet
         priv_dir = join(intro_dir, "private")
-        with open(join(priv_dir, "introducer.furl"), "w") as f:
-            f.write("pb://fooblam\n")
+        with open(join(priv_dir, "introducer.furl"), "w") as fobj_intro:
+            fobj_intro.write("pb://fooblam\n")
+        if tahoe_config is not None:
+            assert isinstance(tahoe_config, bytes)
+            with open(join(intro_dir, "tahoe.cfg"), "wb") as fobj_cfg:
+                fobj_cfg.write(tahoe_config)

         with mock.patch('allmydata.scripts.tahoe_invite.wormhole') as w:
             fake_wh = _create_fake_wormhole([
@@ -162,34 +196,79 @@ class Invite(GridTestMixin, CLITestMixin, unittest.TestCase):
             ])
             w.create = mock.Mock(return_value=fake_wh)

-            rc, out, err = yield run_cli(
+            extra_args = tuple(extra_args)
+
+            d = run_cli(
                 "-d", intro_dir,
                 "invite",
-                "--shares-needed", "1",
-                "--shares-happy", "1",
-                "--shares-total", "1",
-                "foo",
+                *(extra_args + ("foo",))
             )
-            self.assertEqual(2, len(fake_wh.messages))
-            self.assertEqual(
-                json.loads(fake_wh.messages[0]),
-                {
-                    "abilities":
-                    {
-                        "server-v1": {}
-                    },
-                },
-            )
-            self.assertEqual(
-                json.loads(fake_wh.messages[1]),
-                {
-                    "shares-needed": "1",
-                    "shares-total": "1",
-                    "nickname": "foo",
-                    "introducer": "pb://fooblam",
-                    "shares-happy": "1",
-                },
-            )
+
+            def done(result):
+                rc, out, err = result
+                self.assertEqual(2, len(fake_wh.messages))
+                self.assertEqual(
+                    json.loads(fake_wh.messages[0]),
+                    {
+                        "abilities":
+                        {
+                            "server-v1": {}
+                        },
+                    },
+                )
+                invite = json.loads(fake_wh.messages[1])
+                self.assertEqual(
+                    invite["nickname"], "foo",
+                )
+                self.assertEqual(
+                    invite["introducer"], "pb://fooblam",
+                )
+                return invite
+            d.addCallback(done)
+            return d
+
+    @defer.inlineCallbacks
+    def test_invite_success(self):
+        """
+        successfully send an invite
+        """
+        invite = yield self._invite_success((
+            "--shares-needed", "1",
+            "--shares-happy", "2",
+            "--shares-total", "3",
+        ))
+        self.assertEqual(
+            invite["shares-needed"], "1",
+        )
+        self.assertEqual(
+            invite["shares-happy"], "2",
+        )
+        self.assertEqual(
+            invite["shares-total"], "3",
+        )
+
+    @defer.inlineCallbacks
+    def test_invite_success_read_share_config(self):
+        """
+        If ``--shares-{needed,happy,total}`` are not given on the command line
+        then the invitation is generated using the configured values.
+        """
+        invite = yield self._invite_success(tahoe_config=b"""
+[client]
+shares.needed = 2
+shares.happy = 4
+shares.total = 6
+""")
+        self.assertEqual(
+            invite["shares-needed"], "2",
+        )
+        self.assertEqual(
+            invite["shares-happy"], "4",
+        )
+        self.assertEqual(
+            invite["shares-total"], "6",
+        )


     @defer.inlineCallbacks
     def test_invite_no_furl(self):
src/allmydata/test/cli/test_status.py
@@ -1,10 +1,21 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from six import ensure_text
+
 import os
 import mock
 import json
 import tempfile
-from six.moves import StringIO
+from io import BytesIO, StringIO
 from os.path import join
-from UserDict import UserDict

 from twisted.trial import unittest
 from twisted.internet import defer
@@ -22,6 +33,7 @@ from allmydata.immutable.downloader.status import DownloadStatus
 from allmydata.mutable.publish import PublishStatus
 from allmydata.mutable.retrieve import RetrieveStatus
 from allmydata.mutable.servermap import UpdateStatus
+from allmydata.util import jsonbytes as json

 from ..no_network import GridTestMixin
 from ..common_web import do_http
@@ -60,9 +72,8 @@ class ProgressBar(unittest.TestCase):
         )


-class _FakeOptions(UserDict, object):
+class _FakeOptions(dict):
     def __init__(self):
         super(_FakeOptions, self).__init__()
         self._tmp = tempfile.mkdtemp()
         os.mkdir(join(self._tmp, 'private'), 0o777)
         with open(join(self._tmp, 'private', 'api_auth_token'), 'w') as f:
@@ -86,7 +97,7 @@ class Integration(GridTestMixin, CLITestMixin, unittest.TestCase):

         # upload something
         c0 = self.g.clients[0]
-        data = MutableData("data" * 100)
+        data = MutableData(b"data" * 100)
         filenode = yield c0.create_mutable_file(data)
         self.uri = filenode.get_uri()

@@ -97,8 +108,8 @@ class Integration(GridTestMixin, CLITestMixin, unittest.TestCase):
         d = self.do_cli('status')# '--verbose')

         def _check(ign):
-            code, stdout, stdin = ign
-            self.assertEqual(code, 0)
+            code, stdout, stderr = ign
+            self.assertEqual(code, 0, stderr)
             self.assertTrue('Skipped 1' in stdout)
         d.addCallback(_check)
         return d
@@ -124,18 +135,18 @@ class CommandStatus(unittest.TestCase):
     @mock.patch('sys.stdout', StringIO())
     def test_no_operations(self, http):
         values = [
-            StringIO(json.dumps({
+            StringIO(ensure_text(json.dumps({
                 "active": [],
                 "recent": [],
-            })),
-            StringIO(json.dumps({
+            }))),
+            StringIO(ensure_text(json.dumps({
                 "counters": {
                     "bytes_downloaded": 0,
                 },
                 "stats": {
                     "node.uptime": 0,
                 }
-            })),
+            }))),
         ]
         http.side_effect = lambda *args, **kw: values.pop(0)
         do_status(self.options)
@@ -145,14 +156,14 @@ class CommandStatus(unittest.TestCase):
     def test_simple(self, http):
         recent_items = active_items = [
             UploadStatus(),
-            DownloadStatus("abcd", 12345),
+            DownloadStatus(b"abcd", 12345),
             PublishStatus(),
             RetrieveStatus(),
             UpdateStatus(),
             FakeStatus(),
         ]
         values = [
-            StringIO(json.dumps({
+            BytesIO(json.dumps({
                 "active": list(
                     marshal_json(item)
                     for item
@@ -163,15 +174,15 @@ class CommandStatus(unittest.TestCase):
                     for item
                     in recent_items
                 ),
-            })),
-            StringIO(json.dumps({
+            }).encode("utf-8")),
+            BytesIO(json.dumps({
                 "counters": {
                     "bytes_downloaded": 0,
                 },
                 "stats": {
                     "node.uptime": 0,
                 }
-            })),
+            }).encode("utf-8")),
         ]
         http.side_effect = lambda *args, **kw: values.pop(0)
         do_status(self.options)
src/allmydata/test/test_connections.py
@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from twisted.internet import reactor
src/allmydata/test/test_i2p_provider.py
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
src/allmydata/test/test_tor_provider.py
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
@@ -173,7 +185,7 @@ class CreateOnion(unittest.TestCase):
                                                          protocol)))
         txtorcon = mock.Mock()
         ehs = mock.Mock()
-        ehs.private_key = "privkey"
+        ehs.private_key = b"privkey"
         ehs.hostname = "ONION.onion"
         txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
         ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))
@@ -208,7 +220,7 @@ class CreateOnion(unittest.TestCase):
         fn = os.path.join(basedir, tahoe_config_tor["onion.private_key_file"])
         with open(fn, "rb") as f:
             privkey = f.read()
-        self.assertEqual(privkey, "privkey")
+        self.assertEqual(privkey, b"privkey")

     def test_launch(self):
         return self._do_test_launch(None)
@@ -227,7 +239,7 @@ class CreateOnion(unittest.TestCase):
                                                          protocol)))
         txtorcon = mock.Mock()
         ehs = mock.Mock()
-        ehs.private_key = "privkey"
+        ehs.private_key = b"privkey"
         ehs.hostname = "ONION.onion"
         txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
         ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))
@@ -259,7 +271,7 @@ class CreateOnion(unittest.TestCase):
         fn = os.path.join(basedir, tahoe_config_tor["onion.private_key_file"])
         with open(fn, "rb") as f:
             privkey = f.read()
-        self.assertEqual(privkey, "privkey")
+        self.assertEqual(privkey, b"privkey")


 _None = object()
@@ -590,7 +602,7 @@ class Provider_Service(unittest.TestCase):
         launch_tor.assert_called_with(reactor, None,
                                       os.path.join(basedir, "private"), txtorcon)
         txtorcon.EphemeralHiddenService.assert_called_with("456 127.0.0.1:123",
-                                                           "private key")
+                                                           b"private key")
         ehs.add_to_tor.assert_called_with(tor_state.protocol)

         yield p.stopService()
@@ -632,7 +644,7 @@ class Provider_Service(unittest.TestCase):
         cfs.assert_called_with(reactor, "ep_desc")
         txtorcon.build_tor_connection.assert_called_with(tcep)
         txtorcon.EphemeralHiddenService.assert_called_with("456 127.0.0.1:123",
-                                                           "private key")
+                                                           b"private key")
         ehs.add_to_tor.assert_called_with(tor_state.protocol)

         yield p.stopService()
src/allmydata/test/test_util.py
@@ -507,7 +507,6 @@ class JSONBytes(unittest.TestCase):
         self.assertEqual(json.loads(encoded), expected)
         self.assertEqual(jsonbytes.loads(encoded), expected)

-
     def test_encode_unicode(self):
         """BytesJSONEncoder encodes Unicode string as usual."""
         expected = {
@@ -515,3 +514,10 @@ class JSONBytes(unittest.TestCase):
         }
         encoded = jsonbytes.dumps(expected)
         self.assertEqual(json.loads(encoded), expected)
+
+    def test_dumps_bytes(self):
+        """jsonbytes.dumps_bytes always returns bytes."""
+        x = {u"def\N{SNOWMAN}\uFF00": 123}
+        encoded = jsonbytes.dumps_bytes(x)
+        self.assertIsInstance(encoded, bytes)
+        self.assertEqual(json.loads(encoded, encoding="utf-8"), x)
src/allmydata/util/_python3.py
@@ -110,6 +110,7 @@ PORTED_MODULES = [
     "allmydata.util.happinessutil",
     "allmydata.util.hashutil",
     "allmydata.util.humanreadable",
+    "allmydata.util.i2p_provider",
     "allmydata.util.idlib",
     "allmydata.util.iputil",
     "allmydata.util.jsonbytes",
@@ -123,6 +124,7 @@ PORTED_MODULES = [
     "allmydata.util.spans",
     "allmydata.util.statistics",
    "allmydata.util.time_format",
+    "allmydata.util.tor_provider",
     "allmydata.web.check_results",
     "allmydata.web.common",
     "allmydata.web.directory",
@@ -141,7 +143,11 @@ PORTED_MODULES = [
 ]

 PORTED_TEST_MODULES = [
+    "allmydata.test.cli.test_alias",
+    "allmydata.test.cli.test_backupdb",
     "allmydata.test.cli.test_create",
+    "allmydata.test.cli.test_invite",
+    "allmydata.test.cli.test_status",

     "allmydata.test.mutable.test_checker",
     "allmydata.test.mutable.test_datahandle",
@@ -167,6 +173,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_codec",
     "allmydata.test.test_common_util",
     "allmydata.test.test_configutil",
+    "allmydata.test.test_connections",
     "allmydata.test.test_connection_status",
     "allmydata.test.test_crawler",
     "allmydata.test.test_crypto",
@@ -189,6 +196,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_helper",
     "allmydata.test.test_humanreadable",
     "allmydata.test.test_hung_server",
+    "allmydata.test.test_i2p_provider",
     "allmydata.test.test_immutable",
     "allmydata.test.test_introducer",
     "allmydata.test.test_iputil",
@@ -218,6 +226,7 @@ PORTED_TEST_MODULES = [

     "allmydata.test.test_testing",
     "allmydata.test.test_time_format",
+    "allmydata.test.test_tor_provider",
     "allmydata.test.test_upload",
     "allmydata.test.test_uri",
     "allmydata.test.test_util",
src/allmydata/util/i2p_provider.py
@@ -1,5 +1,15 @@
 # -*- coding: utf-8 -*-
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import, print_function, with_statement
+from __future__ import division
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import os

 from zope.interface import (
src/allmydata/util/jsonbytes.py
@@ -9,7 +9,7 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function

-from future.utils import PY2
+from future.utils import PY2, PY3
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

@@ -51,6 +51,14 @@ def dumps(obj, *args, **kwargs):
     return json.dumps(obj, cls=BytesJSONEncoder, *args, **kwargs)


+def dumps_bytes(obj, *args, **kwargs):
+    """Encode to JSON, then encode as bytes."""
+    result = dumps(obj, *args, **kwargs)
+    if PY3:
+        result = result.encode("utf-8")
+    return result
+
+
 # To make this module drop-in compatible with json module:
 loads = json.loads
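Usage of the new helper, following the ``test_dumps_bytes`` case added above (a sketch, assuming a tahoe-lafs checkout on the import path):

    from allmydata.util import jsonbytes

    encoded = jsonbytes.dumps_bytes({u"nickname": u"foo"})
    assert isinstance(encoded, bytes)  # bytes on both Python 2 and Python 3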
src/allmydata/util/tor_provider.py
@@ -1,5 +1,15 @@
 # -*- coding: utf-8 -*-
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import, print_function, with_statement
+from __future__ import division
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import os

 from zope.interface import (