Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-05-01 08:20:51 +00:00)
Merge branch '3709-even-more-integration-tests-python-3' into 3716.allmydata-scripts-python-3-part-1
Commit e0cab75cc6
.github/workflows/ci.yml (vendored, 1 line changed)
@@ -33,7 +33,6 @@ jobs:
         python-version: 2.7

     steps:

       # See https://github.com/actions/checkout. A fetch-depth of 0
       # fetches all tags and branches.
       - name: Check out Tahoe-LAFS sources
@@ -1,5 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
 from __future__ import print_function
 
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 import shutil
 from time import sleep
@@ -1,12 +1,22 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
 from __future__ import print_function
 
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 from os.path import join
 
 import pytest
 import pytest_twisted
 
-import util
+from . import util
 
 from twisted.python.filepath import (
     FilePath,
@@ -55,7 +65,7 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
     cap = proto.output.getvalue().strip().split()[-1]
     print("TEH CAP!", cap)
 
-    proto = util._CollectOutputProtocol()
+    proto = util._CollectOutputProtocol(capture_stderr=False)
     reactor.spawnProcess(
         proto,
         sys.executable,
@@ -68,7 +78,7 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
     yield proto.done
 
     dave_got = proto.output.getvalue().strip()
-    assert dave_got == open(gold_path, 'r').read().strip()
+    assert dave_got == open(gold_path, 'rb').read().strip()
 
 
 @pytest_twisted.inlineCallbacks
@@ -100,7 +110,7 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
     # Which services should this client connect to?
     write_introducer(node_dir, "default", introducer_furl)
     with node_dir.child('tahoe.cfg').open('w') as f:
-        f.write('''
+        node_config = '''
 [node]
 nickname = %(name)s
 web.port = %(web_port)s
@@ -125,7 +135,9 @@ shares.total = 2
     'log_furl': flog_gatherer,
     'control_port': control_port,
     'local_port': control_port + 1000,
-})
+}
+        node_config = node_config.encode("utf-8")
+        f.write(node_config)
 
     print("running")
     yield util._run_node(reactor, node_dir.path, request, None)
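The encode step above exists because Twisted's FilePath.open() always opens files in binary mode, so on Python 3 the interpolated config text has to become bytes before it is written. A minimal sketch of the same pattern, using an invented temporary directory rather than the test's own temp_dir:

    import tempfile

    from twisted.python.filepath import FilePath

    node_dir = FilePath(tempfile.mkdtemp())   # hypothetical location, for illustration only
    node_config = '''
    [node]
    nickname = %(name)s
    ''' % {'name': 'example-node'}

    # FilePath.open() appends 'b' to the requested mode, so the formatted
    # text must be encoded to bytes before writing.
    with node_dir.child('tahoe.cfg').open('w') as f:
        f.write(node_config.encode("utf-8"))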
@@ -7,17 +7,26 @@ Most of the tests have cursory asserts and encode 'what the WebAPI did
 at the time of testing' -- not necessarily a cohesive idea of what the
 WebAPI *should* do in every situation. It's not clear the latter
 exists anywhere, however.
+
+Ported to Python 3.
 """
 
-from past.builtins import unicode
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import time
-import json
-import urllib2
+from urllib.parse import unquote as url_unquote, quote as url_quote
 
 import allmydata.uri
+from allmydata.util import jsonbytes as json
 
-import util
+from . import util
 
 import requests
 import html5lib
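For reference, a small sketch (with a made-up capability string) of the aliased stdlib helpers that replace urllib2 here:

    from urllib.parse import quote as url_quote, unquote as url_unquote

    dircap = "URI:DIR2:aaaa:bbbb"      # invented cap, illustration only
    quoted = url_quote(dircap)         # ':' becomes %3A, safe to embed in a URL path
    assert url_unquote(quoted) == dircap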
@@ -66,7 +75,7 @@ def test_upload_download(alice):
             u"filename": u"boom",
         }
     )
-    assert data == FILE_CONTENTS
+    assert str(data, "utf-8") == FILE_CONTENTS
 
 
 def test_put(alice):
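Many of the assertion changes in this file follow from one Python 3 fact: requests exposes response bodies as bytes. A trivial illustration with invented values, not test code:

    body = b"some file contents"                         # what resp.content holds on Python 3
    assert str(body, "utf-8") == u"some file contents"   # decode before comparing to text
    assert b"some file" in body                          # or compare against a bytes literal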
@@ -97,7 +106,7 @@ def test_helper_status(storage_nodes):
     resp = requests.get(url)
     assert resp.status_code >= 200 and resp.status_code < 300
     dom = BeautifulSoup(resp.content, "html5lib")
-    assert unicode(dom.h1.string) == u"Helper Status"
+    assert str(dom.h1.string) == u"Helper Status"
 
 
 def test_deep_stats(alice):
@@ -117,10 +126,10 @@ def test_deep_stats(alice):
 
     # when creating a directory, we'll be re-directed to a URL
     # containing our writecap..
-    uri = urllib2.unquote(resp.url)
+    uri = url_unquote(resp.url)
     assert 'URI:DIR2:' in uri
     dircap = uri[uri.find("URI:DIR2:"):].rstrip('/')
-    dircap_uri = util.node_url(alice.node_dir, "uri/{}".format(urllib2.quote(dircap)))
+    dircap_uri = util.node_url(alice.node_dir, "uri/{}".format(url_quote(dircap)))
 
     # POST a file into this directory
     FILE_CONTENTS = u"a file in a directory"
@@ -147,7 +156,7 @@ def test_deep_stats(alice):
     k, data = d
     assert k == u"dirnode"
     assert len(data['children']) == 1
-    k, child = data['children'].values()[0]
+    k, child = list(data['children'].values())[0]
     assert k == u"filenode"
     assert child['size'] == len(FILE_CONTENTS)
 
@@ -198,11 +207,11 @@ def test_status(alice):
 
     print("Uploaded data, cap={}".format(cap))
     resp = requests.get(
-        util.node_url(alice.node_dir, u"uri/{}".format(urllib2.quote(cap))),
+        util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap))),
     )
 
     print("Downloaded {} bytes of data".format(len(resp.content)))
-    assert resp.content == FILE_CONTENTS
+    assert str(resp.content, "ascii") == FILE_CONTENTS
 
     resp = requests.get(
         util.node_url(alice.node_dir, "status"),
@@ -221,12 +230,12 @@ def test_status(alice):
             continue
         resp = requests.get(util.node_url(alice.node_dir, href))
         if href.startswith(u"/status/up"):
-            assert "File Upload Status" in resp.content
-            if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
+            assert b"File Upload Status" in resp.content
+            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                 found_upload = True
         elif href.startswith(u"/status/down"):
-            assert "File Download Status" in resp.content
-            if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
+            assert b"File Download Status" in resp.content
+            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                 found_download = True
 
     # download the specialized event information
@@ -299,7 +308,7 @@ def test_directory_deep_check(alice):
     print("Uploaded data1, cap={}".format(cap1))
 
     resp = requests.get(
-        util.node_url(alice.node_dir, u"uri/{}".format(urllib2.quote(cap0))),
+        util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap0))),
         params={u"t": u"info"},
     )
 
@@ -400,9 +409,9 @@ def test_directory_deep_check(alice):
     for _ in range(5):
         resp = requests.get(deepcheck_uri)
         dom = BeautifulSoup(resp.content, "html5lib")
-        if dom.h1 and u'Results' in unicode(dom.h1.string):
+        if dom.h1 and u'Results' in str(dom.h1.string):
             break
-        if dom.h2 and dom.h2.a and u"Reload" in unicode(dom.h2.a.string):
+        if dom.h2 and dom.h2.a and u"Reload" in str(dom.h2.a.string):
             dom = None
         time.sleep(1)
     assert dom is not None, "Operation never completed"
@@ -440,7 +449,7 @@ def test_introducer_info(introducer):
     resp = requests.get(
         util.node_url(introducer.node_dir, u""),
     )
-    assert "Introducer" in resp.content
+    assert b"Introducer" in resp.content
 
     resp = requests.get(
         util.node_url(introducer.node_dir, u""),
@@ -513,6 +522,6 @@ def test_mkdir_with_children(alice):
         params={u"t": "mkdir-with-children"},
         data=json.dumps(meta),
     )
-    assert resp.startswith("URI:DIR2")
+    assert resp.startswith(b"URI:DIR2")
     cap = allmydata.uri.from_string(resp)
     assert isinstance(cap, allmydata.uri.DirectoryURI)
@@ -1,4 +1,14 @@
-from past.builtins import unicode
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 import time
@@ -57,9 +67,10 @@ class _CollectOutputProtocol(ProcessProtocol):
     self.output, and callback's on done with all of it after the
     process exits (for any reason).
     """
-    def __init__(self):
+    def __init__(self, capture_stderr=True):
         self.done = Deferred()
         self.output = BytesIO()
+        self.capture_stderr = capture_stderr
 
     def processEnded(self, reason):
         if not self.done.called:
@@ -74,6 +85,7 @@ class _CollectOutputProtocol(ProcessProtocol):
 
     def errReceived(self, data):
         print("ERR: {!r}".format(data))
-        self.output.write(data)
+        if self.capture_stderr:
+            self.output.write(data)
 
 
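Taken together, the two hunks above give the output collector an opt-out for stderr. A self-contained sketch of the same pattern (the class and names below are illustrative, not Tahoe's own helper):

    from io import BytesIO

    from twisted.internet.defer import Deferred
    from twisted.internet.protocol import ProcessProtocol


    class CollectOutput(ProcessProtocol):
        """Collect stdout, and optionally stderr, of a child process."""

        def __init__(self, capture_stderr=True):
            self.done = Deferred()
            self.output = BytesIO()
            self.capture_stderr = capture_stderr

        def outReceived(self, data):
            self.output.write(data)

        def errReceived(self, data):
            # Only fold stderr into the collected output when asked to, so a
            # caller comparing stdout against an expected value is not tripped
            # up by warnings the child prints to stderr.
            if self.capture_stderr:
                self.output.write(data)

        def processEnded(self, reason):
            if not self.done.called:
                self.done.callback(self.output.getvalue())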
@@ -94,11 +106,11 @@ class _DumpOutputProtocol(ProcessProtocol):
         self.done.errback(reason)
 
     def outReceived(self, data):
-        data = unicode(data, sys.stdout.encoding)
+        data = str(data, sys.stdout.encoding)
         self._out.write(data)
 
     def errReceived(self, data):
-        data = unicode(data, sys.stdout.encoding)
+        data = str(data, sys.stdout.encoding)
         self._out.write(data)
 
 
@@ -118,7 +130,7 @@ class _MagicTextProtocol(ProcessProtocol):
         self.exited.callback(None)
 
     def outReceived(self, data):
-        data = unicode(data, sys.stdout.encoding)
+        data = str(data, sys.stdout.encoding)
         sys.stdout.write(data)
         self._output.write(data)
         if not self.magic_seen.called and self._magic_text in self._output.getvalue():
@@ -126,7 +138,7 @@ class _MagicTextProtocol(ProcessProtocol):
             self.magic_seen.callback(self)
 
     def errReceived(self, data):
-        data = unicode(data, sys.stderr.encoding)
+        data = str(data, sys.stderr.encoding)
         sys.stdout.write(data)
 
 
@@ -267,9 +279,9 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
         '--hostname', 'localhost',
         '--listen', 'tcp',
         '--webport', web_port,
-        '--shares-needed', unicode(needed),
-        '--shares-happy', unicode(happy),
-        '--shares-total', unicode(total),
+        '--shares-needed', str(needed),
+        '--shares-happy', str(happy),
+        '--shares-total', str(total),
         '--helper',
     ]
     if not storage:
newsfragments/3709.minor (new empty file)
newsfragments/3717.minor (new empty file)
@@ -6,7 +6,7 @@ from __future__ import unicode_literals
 from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
-import warnings
+
 import os, sys
 from six.moves import StringIO
 import six
@@ -183,10 +183,12 @@ def _maybe_enable_eliot_logging(options, reactor):
     # Pass on the options so we can dispatch the subcommand.
     return options
 
+PYTHON_3_WARNING = ("Support for Python 3 is an incomplete work-in-progress."
+                    " Use at your own risk.")
+
 def run():
     if six.PY3:
-        warnings.warn("Support for Python 3 is an incomplete work-in-progress."
-                      " Use at your own risk.")
+        print(PYTHON_3_WARNING, file=sys.stderr)
 
     if sys.platform == "win32":
         from allmydata.windows.fixups import initialize
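Printing the warning as a single known line on stderr (rather than routing it through warnings.warn) is what lets the system tests below accept it verbatim. A hedged sketch of that tolerance check, not the tests' actual code:

    PYTHON_3_WARNING = ("Support for Python 3 is an incomplete work-in-progress."
                        " Use at your own risk.")

    def stderr_is_clean(err):
        # Empty stderr and the bare warning line are both acceptable; anything
        # else is treated as a real error, mirroring the assertIn change below.
        return err.strip() in (b"", PYTHON_3_WARNING.encode("ascii"))

    assert stderr_is_clean(b"")
    assert stderr_is_clean(PYTHON_3_WARNING.encode("ascii") + b"\n")
    assert not stderr_is_clean(b"Traceback (most recent call last): ...")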
@@ -43,6 +43,7 @@ from allmydata.monitor import Monitor
 from allmydata.mutable.common import NotWriteableError
 from allmydata.mutable import layout as mutable_layout
 from allmydata.mutable.publish import MutableData
+from allmydata.scripts.runner import PYTHON_3_WARNING
 
 from foolscap.api import DeadReferenceError, fireEventually, flushEventualQueue
 from twisted.python.failure import Failure
@@ -2635,7 +2636,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             out, err, rc_or_sig = res
             self.failUnlessEqual(rc_or_sig, 0, str(res))
             if check_stderr:
-                self.failUnlessEqual(err, b"")
+                self.assertIn(err.strip(), (b"", PYTHON_3_WARNING.encode("ascii")))
 
         d.addCallback(_run_in_subprocess, "create-alias", "newalias")
         d.addCallback(_check_succeeded)
@@ -2655,7 +2656,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         def _check_ls(res):
             out, err, rc_or_sig = res
             self.failUnlessEqual(rc_or_sig, 0, str(res))
-            self.failUnlessEqual(err, b"", str(res))
+            self.assertIn(err.strip(), (b"", PYTHON_3_WARNING.encode("ascii")))
             self.failUnlessIn(b"tahoe-moved", out)
             self.failIfIn(b"tahoe-file", out)
         d.addCallback(_check_ls)
@@ -14,6 +14,7 @@ import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
 from six.moves import StringIO
+from six import ensure_str
 import mock
 from ..util import tor_provider
 from ..scripts import create_node, runner
@@ -185,7 +186,8 @@ class CreateOnion(unittest.TestCase):
                                                          protocol)))
         txtorcon = mock.Mock()
         ehs = mock.Mock()
-        ehs.private_key = b"privkey"
+        # This appears to be a native string in the real txtorcon object...
+        ehs.private_key = ensure_str("privkey")
         ehs.hostname = "ONION.onion"
         txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
         ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))
@@ -16,17 +16,20 @@ from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 
 
-# Every time a module is added here, also add it to tox.ini environment
-# integrations3. Bit of duplication, but it's only a handful of files and quite
-# temporary, just until we've ported them all.
 PORTED_INTEGRATION_TESTS = [
     "integration.test_aaa_aardvark",
     "integration.test_servers_of_happiness",
     "integration.test_sftp",
     "integration.test_streaming_logs",
+    "integration.test_tor",
+    "integration.test_web",
 ]
 
+PORTED_INTEGRATION_MODULES = [
+    "integration",
+    "integration.conftest",
+    "integration.util",
+]
+
 # Keep these sorted alphabetically, to reduce merge conflicts:
 PORTED_MODULES = [
@@ -211,6 +211,8 @@ def create_config(reactor, cli_config):
                                        "tor_onion.privkey")
         privkeyfile = os.path.join(private_dir, "tor_onion.privkey")
         with open(privkeyfile, "wb") as f:
+            if isinstance(privkey, str):
+                privkey = privkey.encode("ascii")
             f.write(privkey)
 
     # tahoe_config_tor: this is a dictionary of keys/values to add to the
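A minimal sketch of the normalisation added above, with an invented path and key value: whatever type txtorcon hands back, the key is written as bytes because the file is opened in binary mode.

    import os
    import tempfile

    def write_privkey(privkeyfile, privkey):
        with open(privkeyfile, "wb") as f:
            # Accept either text or bytes and always write bytes.
            if isinstance(privkey, str):
                privkey = privkey.encode("ascii")
            f.write(privkey)

    write_privkey(os.path.join(tempfile.mkdtemp(), "tor_onion.privkey"),
                  "RSA1024:notarealkey")  # hypothetical key material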
@@ -1173,7 +1173,8 @@ class MapupdateStatusElement(Element):
     def privkey_from(self, req, tag):
         server = self._update_status.get_privkey_from()
         if server:
-            return tag(tags.li("Got privkey from: [%s]" % server.get_name()))
+            return tag(tags.li("Got privkey from: [%s]" % str(
+                server.get_name(), "utf-8")))
         else:
             return tag
 
tox.ini (4 lines changed)
@@ -104,7 +104,7 @@ setenv =
 commands =
     python --version
     # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
-    python3 -b -m pytest --timeout=1800 --coverage -v {posargs:integration/test_aaa_aardvark.py integration/test_servers_of_happiness.py integration/test_sftp.py integration/test_streaming_logs.py}
+    python3 -b -m pytest --timeout=1800 --coverage -v {posargs:integration}
     coverage combine
     coverage report
 
@@ -281,6 +281,8 @@ deps =
     # PyInstaller 4.0 drops Python 2 support. When we finish porting to
     # Python 3 we can reconsider this constraint.
     pyinstaller < 4.0
+    # 2021.5.13 broke on Windows. See https://github.com/erocarrera/pefile/issues/318
+    pefile < 2021.5.13 ; platform_system == "Windows"
 # Setting PYTHONHASHSEED to a known value assists with reproducible builds.
 # See https://pyinstaller.readthedocs.io/en/stable/advanced-topics.html#creating-a-reproducible-build
 setenv=PYTHONHASHSEED=1