Merge remote-tracking branch 'origin/master' into 3592.convergence-hasher-tag-tests
This commit is contained in: commit 15dbbce217

.github/workflows/ci.yml (vendored): 62 lines changed
@@ -30,17 +30,37 @@ jobs:
        with:
          args: install vcpython27

      # See https://github.com/actions/checkout. A fetch-depth of 0
      # fetches all tags and branches.
      - name: Check out Tahoe-LAFS sources
        uses: actions/checkout@v2

      - name: Fetch all history for all tags and branches
        run: git fetch --prune --unshallow
        with:
          fetch-depth: 0

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}

      # To use pip caching with GitHub Actions in an OS-independent
      # manner, we need `pip cache dir` command, which became
      # available since pip v20.1+. At the time of writing this,
      # GitHub Actions offers pip v20.3.3 for both ubuntu-latest and
      # windows-latest, and pip v20.3.1 for macos-latest.
      - name: Get pip cache directory
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"

      # See https://github.com/actions/cache
      - name: Use pip cache
        uses: actions/cache@v2
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Install Python packages
        run: |
          pip install --upgrade codecov tox setuptools
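An aside on the caching comments in the hunk above: the workflow's cache-directory lookup can be reproduced outside CI. This is an illustrative sketch, not part of the diff; it only assumes a pip >= 20.1 (where the `pip cache` subcommand appeared) on PATH.

    # Mirror of the CI step's `pip cache dir` lookup (illustrative only).
    import subprocess

    def pip_cache_dir():
        # Same command the workflow runs; returns e.g. ~/.cache/pip on
        # Linux or %LocalAppData%\pip\Cache on Windows.
        return subprocess.check_output(
            ["pip", "cache", "dir"], universal_newlines=True
        ).strip()

    print(pip_cache_dir())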
@@ -103,15 +123,27 @@ jobs:

      - name: Check out Tahoe-LAFS sources
        uses: actions/checkout@v2

      - name: Fetch all history for all tags and branches
        run: git fetch --prune --unshallow
        with:
          fetch-depth: 0

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}

      - name: Get pip cache directory
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"

      - name: Use pip cache
        uses: actions/cache@v2
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Install Python packages
        run: |
          pip install --upgrade tox
@@ -155,15 +187,27 @@ jobs:

      - name: Check out Tahoe-LAFS sources
        uses: actions/checkout@v2

      - name: Fetch all history for all tags and branches
        run: git fetch --prune --unshallow
        with:
          fetch-depth: 0

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}

      - name: Get pip cache directory
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"

      - name: Use pip cache
        uses: actions/cache@v2
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Install Python packages
        run: |
          pip install --upgrade tox
@@ -28,7 +28,7 @@ import os
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = []
+extensions = ['recommonmark']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -36,7 +36,7 @@ templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ['.rst', '.md']

# The encoding of source files.
#source_encoding = 'utf-8-sig'
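Taken together, the two hunks above amount to a small, fully visible change to the Sphinx configuration:

    # docs/conf.py after both hunks: recommonmark lets Sphinx build the
    # Markdown sources alongside the RST ones.
    extensions = ['recommonmark']
    source_suffix = ['.rst', '.md']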
docs/contributing.rst (new file):

@@ -0,0 +1 @@
+.. include:: ../.github/CONTRIBUTING.rst
@@ -2032,10 +2032,11 @@ potential for surprises when the file store structure is changed.

Tahoe-LAFS provides a mutable file store, but the ways that the store can
change are limited. The only things that can change are:
-* the mapping from child names to child objects inside mutable directories
-  (by adding a new child, removing an existing child, or changing an
-  existing child to point to a different object)
-* the contents of mutable files
+
+* the mapping from child names to child objects inside mutable directories
+  (by adding a new child, removing an existing child, or changing an
+  existing child to point to a different object)
+* the contents of mutable files

Obviously if you query for information about the file store and then act
to change it (such as by getting a listing of the contents of a mutable
@@ -23,8 +23,9 @@ Contents:
   frontends/download-status

   known_issues
-   ../.github/CONTRIBUTING
+   contributing
+   CODE_OF_CONDUCT
   release-checklist

   servers
   helper
@@ -40,23 +40,31 @@ Create Branch and Apply Updates

- Create a branch for release-candidates (e.g. `XXXX.release-1.15.0.rc0`)
- run `tox -e news` to produce a new NEWS.txt file (this does a commit)
- create the news for the release

  - newsfragments/<ticket number>.minor
  - commit it

- manually fix NEWS.txt

  - proper title for latest release ("Release 1.15.0" instead of "Release ...post1432")
  - double-check date (maybe release will be in the future)
  - spot-check the release notes (these come from the newsfragments
    files though so don't do heavy editing)
  - commit these changes

- update "relnotes.txt"

  - update all mentions of 1.14.0 -> 1.15.0
  - update "previous release" statement and date
  - summarize major changes
  - commit it

- update "CREDITS"

  - are there any new contributors in this release?
  - one way: git log release-1.14.0.. | grep Author | sort | uniq
  - commit it

- update "docs/known_issues.rst" if appropriate
- update "docs/INSTALL.rst" references to the new release
- Push the branch to github
@@ -82,25 +90,36 @@ they will need to evaluate which contributors' signatures they trust.

- (all steps above are completed)
- sign the release

  - git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.15.0rc0" tahoe-lafs-1.15.0rc0
  - (replace the key-id above with your own)

- build all code locally
- these should all pass:

  - tox -e py27,codechecks,docs,integration

- these can fail (ideally they should not of course):

  - tox -e deprecations,upcoming-deprecations

- build tarballs

  - tox -e tarballs
  - confirm it at least exists:
  - ls dist/ | grep 1.15.0rc0

- inspect and test the tarballs

  - install each in a fresh virtualenv
  - run `tahoe` command

- when satisfied, sign the tarballs:

  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.gz
  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.zip

  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.gz
  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.zip


Privileged Contributor
@@ -129,6 +148,7 @@ need to be uploaded to https://tahoe-lafs.org in `~source/downloads`
https://tahoe-lafs.org/downloads/ on the Web.
- scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads
- the following developers have access to do this:

  - exarkun
  - meejah
  - warner
@@ -137,8 +157,9 @@ For the actual release, the tarball and signature files need to be
uploaded to PyPI as well.

- how to do this?
- (original guide says only "twine upload dist/*")
- (original guide says only `twine upload dist/*`)
- the following developers have access to do this:

  - warner
  - exarkun (partial?)
  - meejah (partial?)
@@ -127,12 +127,12 @@ def test_deep_stats(alice):
        dircap_uri,
        data={
            u"t": u"upload",
            u"when_done": u".",
        },
        files={
            u"file": FILE_CONTENTS,
        },
    )
    resp.raise_for_status()

    # confirm the file is in the directory
    resp = requests.get(
newsfragments/2920.minor (new, empty file)
newsfragments/3536.minor (new, empty file)
newsfragments/3589.minor (new, empty file)

newsfragments/3590.bugfix (new file):

@@ -0,0 +1 @@
+Fixed issue where redirecting old-style URIs (/uri/?uri=...) didn't work.

newsfragments/3591.minor (new, empty file)
newsfragments/3594.minor (new, empty file)
newsfragments/3595.minor (new, empty file)
@@ -13,19 +13,30 @@ if PY2:
from past.builtins import long, unicode
from six import ensure_str

+try:
+    from typing import List
+except ImportError:
+    pass
+
import os, time, weakref, itertools

+import attr
+
from zope.interface import implementer
from twisted.python import failure
from twisted.internet import defer
from twisted.application import service
-from foolscap.api import Referenceable, Copyable, RemoteCopy, fireEventually
+from foolscap.api import Referenceable, Copyable, RemoteCopy

from allmydata.crypto import aes
from allmydata.util.hashutil import file_renewal_secret_hash, \
     file_cancel_secret_hash, bucket_renewal_secret_hash, \
     bucket_cancel_secret_hash, plaintext_hasher, \
     storage_index_hash, plaintext_segment_hasher, convergence_hasher
-from allmydata.util.deferredutil import timeout_call
+from allmydata.util.deferredutil import (
+    timeout_call,
+    until,
+)
from allmydata import hashtree, uri
from allmydata.storage.server import si_b2a
from allmydata.immutable import encode
@@ -900,13 +911,45 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
            raise UploadUnhappinessError(msg)


+@attr.s
+class _Accum(object):
+    """
+    Accumulate up to some known amount of ciphertext.
+
+    :ivar remaining: The number of bytes still expected.
+    :ivar ciphertext: The bytes accumulated so far.
+    """
+    remaining = attr.ib(validator=attr.validators.instance_of(int))  # type: int
+    ciphertext = attr.ib(default=attr.Factory(list))  # type: List[bytes]
+
+    def extend(self,
+               size,        # type: int
+               ciphertext,  # type: List[bytes]
+    ):
+        """
+        Accumulate some more ciphertext.
+
+        :param size: The amount of data the new ciphertext represents towards
+            the goal. This may be more than the actual size of the given
+            ciphertext if the source has run out of data.
+
+        :param ciphertext: The new ciphertext to accumulate.
+        """
+        self.remaining -= size
+        self.ciphertext.extend(ciphertext)
+
+
@implementer(IEncryptedUploadable)
class EncryptAnUploadable(object):
    """This is a wrapper that takes an IUploadable and provides
    IEncryptedUploadable."""
    CHUNKSIZE = 50*1024

-    def __init__(self, original, log_parent=None, progress=None):
+    def __init__(self, original, log_parent=None, progress=None, chunk_size=None):
+        """
+        :param chunk_size: The number of bytes to read from the uploadable at a
+            time, or None for some default.
+        """
        precondition(original.default_params_set,
                     "set_default_encoding_parameters not called on %r before wrapping with EncryptAnUploadable" % (original,))
        self.original = IUploadable(original)
@@ -920,6 +963,8 @@ class EncryptAnUploadable(object):
        self._ciphertext_bytes_read = 0
        self._status = None
        self._progress = progress
+        if chunk_size is not None:
+            self.CHUNKSIZE = chunk_size

    def set_upload_status(self, upload_status):
        self._status = IUploadStatus(upload_status)
@@ -1026,47 +1071,53 @@ class EncryptAnUploadable(object):
        # and size
        d.addCallback(lambda ignored: self.get_size())
        d.addCallback(lambda ignored: self._get_encryptor())
-        # then fetch and encrypt the plaintext. The unusual structure here
-        # (passing a Deferred *into* a function) is needed to avoid
-        # overflowing the stack: Deferreds don't optimize out tail recursion.
-        # We also pass in a list, to which _read_encrypted will append
-        # ciphertext.
-        ciphertext = []
-        d2 = defer.Deferred()
-        d.addCallback(lambda ignored:
-                      self._read_encrypted(length, ciphertext, hash_only, d2))
-        d.addCallback(lambda ignored: d2)
+
+        accum = _Accum(length)
+
+        def action():
+            """
+            Read some bytes into the accumulator.
+            """
+            return self._read_encrypted(accum, hash_only)
+
+        def condition():
+            """
+            Check to see if the accumulator has all the data.
+            """
+            return accum.remaining == 0
+
+        d.addCallback(lambda ignored: until(action, condition))
+        d.addCallback(lambda ignored: accum.ciphertext)
        return d

-    def _read_encrypted(self, remaining, ciphertext, hash_only, fire_when_done):
-        if not remaining:
-            fire_when_done.callback(ciphertext)
-            return None
+    def _read_encrypted(self,
+                        ciphertext_accum,  # type: _Accum
+                        hash_only,         # type: bool
+    ):
+        # type: (...) -> defer.Deferred
+        """
+        Read the next chunk of plaintext, encrypt it, and extend the accumulator
+        with the resulting ciphertext.
+        """
        # tolerate large length= values without consuming a lot of RAM by
        # reading just a chunk (say 50kB) at a time. This only really matters
        # when hash_only==True (i.e. resuming an interrupted upload), since
        # that's the case where we will be skipping over a lot of data.
-        size = min(remaining, self.CHUNKSIZE)
-        remaining = remaining - size
+        size = min(ciphertext_accum.remaining, self.CHUNKSIZE)

        # read a chunk of plaintext..
        d = defer.maybeDeferred(self.original.read, size)
-        # N.B.: if read() is synchronous, then since everything else is
-        # actually synchronous too, we'd blow the stack unless we stall for a
-        # tick. Once you accept a Deferred from IUploadable.read(), you must
-        # be prepared to have it fire immediately too.
-        d.addCallback(fireEventually)
        def _good(plaintext):
            # and encrypt it..
            # o/' over the fields we go, hashing all the way, sHA! sHA! sHA! o/'
            ct = self._hash_and_encrypt_plaintext(plaintext, hash_only)
-            ciphertext.extend(ct)
-            self._read_encrypted(remaining, ciphertext, hash_only,
-                                 fire_when_done)
-        def _err(why):
-            fire_when_done.errback(why)
+            # Intentionally tell the accumulator about the expected size, not
+            # the actual size. If we run out of data we still want remaining
+            # to drop otherwise it will never reach 0 and the loop will never
+            # end.
+            ciphertext_accum.extend(size, ct)
        d.addCallback(_good)
-        d.addErrback(_err)
-        return None
+        return d

    def _hash_and_encrypt_plaintext(self, data, hash_only):
        assert isinstance(data, (tuple, list)), type(data)
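The refactoring in the hunk above swaps explicit Deferred tail-recursion for a data-driven loop: an accumulator records how many bytes are still owed, and `until` re-runs the read action until that count reaches zero. A self-contained sketch of the same pattern follows; `Accum` and `read_all` here are hypothetical simplifications for illustration, not tahoe APIs, and only `deferredutil.until` comes from this commit.

    import attr
    from twisted.internet import defer
    from allmydata.util.deferredutil import until  # added by this commit

    @attr.s
    class Accum(object):
        remaining = attr.ib()                         # bytes still expected
        chunks = attr.ib(default=attr.Factory(list))  # data gathered so far

        def extend(self, size, chunks):
            # Count the *requested* size, not len(chunks), so remaining
            # still reaches 0 when the source runs out of data early
            # (mirrors _Accum.extend above).
            self.remaining -= size
            self.chunks.extend(chunks)

    def read_all(read_chunk, length, chunk_size=4):
        accum = Accum(length)

        def action():
            size = min(accum.remaining, chunk_size)
            d = defer.maybeDeferred(read_chunk, size)
            return d.addCallback(lambda data: accum.extend(size, [data]))

        d = until(action, lambda: accum.remaining == 0)
        d.addCallback(lambda ignored: b"".join(accum.chunks))
        return d

Because until drives the loop through an inlineCallbacks-style generator rather than recursive callbacks, a fully synchronous read no longer grows the stack, which is presumably why the fireEventually stall could be dropped.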
@@ -74,3 +74,58 @@ class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin
        d = defer.succeed(None)
        d.addBoth(self.wait_for_delayed_calls)
        return d


class UntilTests(unittest.TestCase):
    """
    Tests for ``deferredutil.until``.
    """
    def test_exception(self):
        """
        If the action raises an exception, the ``Deferred`` returned by ``until``
        fires with a ``Failure``.
        """
        self.assertFailure(
            deferredutil.until(lambda: 1/0, lambda: True),
            ZeroDivisionError,
        )

    def test_stops_on_condition(self):
        """
        The action is called repeatedly until ``condition`` returns ``True``.
        """
        calls = []
        def action():
            calls.append(None)

        def condition():
            return len(calls) == 3

        self.assertIs(
            self.successResultOf(
                deferredutil.until(action, condition),
            ),
            None,
        )
        self.assertEqual(3, len(calls))

    def test_waits_for_deferred(self):
        """
        If the action returns a ``Deferred`` then it is called again when the
        ``Deferred`` fires.
        """
        counter = [0]
        r1 = defer.Deferred()
        r2 = defer.Deferred()
        results = [r1, r2]
        def action():
            counter[0] += 1
            return results.pop(0)

        def condition():
            return False

        deferredutil.until(action, condition)
        self.assertEqual([1], counter)
        r1.callback(None)
        self.assertEqual([2], counter)
@@ -14,6 +14,17 @@ if PY2:

import os, shutil
from io import BytesIO
from base64 import (
    b64encode,
)

from hypothesis import (
    given,
)
from hypothesis.strategies import (
    just,
    integers,
)

from twisted.trial import unittest
from twisted.python.failure import Failure
@@ -2029,6 +2040,91 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
            f.close()
            return None


class EncryptAnUploadableTests(unittest.TestCase):
    """
    Tests for ``EncryptAnUploadable``.
    """
    def test_same_length(self):
        """
        ``EncryptAnUploadable.read_encrypted`` returns ciphertext of the same
        length as the underlying plaintext.
        """
        plaintext = b"hello world"
        uploadable = upload.FileHandle(BytesIO(plaintext), None)
        uploadable.set_default_encoding_parameters({
            # These values shouldn't matter.
            "k": 3,
            "happy": 5,
            "n": 10,
            "max_segment_size": 128 * 1024,
        })
        encrypter = upload.EncryptAnUploadable(uploadable)
        ciphertext = b"".join(self.successResultOf(encrypter.read_encrypted(1024, False)))
        self.assertEqual(len(ciphertext), len(plaintext))

    @given(just(b"hello world"), integers(min_value=0, max_value=len(b"hello world")))
    def test_known_result(self, plaintext, split_at):
        """
        ``EncryptAnUploadable.read_encrypted`` returns a known-correct ciphertext
        string for certain inputs. The ciphertext is independent of the read
        sizes.
        """
        convergence = b"\x42" * 16
        uploadable = upload.FileHandle(BytesIO(plaintext), convergence)
        uploadable.set_default_encoding_parameters({
            # The convergence key is a function of k, n, and max_segment_size
            # (among other things). The value for happy doesn't matter
            # though.
            "k": 3,
            "happy": 5,
            "n": 10,
            "max_segment_size": 128 * 1024,
        })
        encrypter = upload.EncryptAnUploadable(uploadable)
        def read(n):
            return b"".join(self.successResultOf(encrypter.read_encrypted(n, False)))

        # Read the string in one or two pieces to make sure underlying state
        # is maintained properly.
        first = read(split_at)
        second = read(len(plaintext) - split_at)
        third = read(1)
        ciphertext = first + second + third

        self.assertEqual(
            b"Jd2LHCRXozwrEJc=",
            b64encode(ciphertext),
        )

    def test_large_read(self):
        """
        ``EncryptAnUploadable.read_encrypted`` succeeds even when the requested
        data length is much larger than the chunk size.
        """
        convergence = b"\x42" * 16
        # 4kB of plaintext
        plaintext = b"\xde\xad\xbe\xef" * 1024
        uploadable = upload.FileHandle(BytesIO(plaintext), convergence)
        uploadable.set_default_encoding_parameters({
            "k": 3,
            "happy": 5,
            "n": 10,
            "max_segment_size": 128 * 1024,
        })
        # Make the chunk size very small so we don't have to operate on a huge
        # amount of data to exercise the relevant codepath.
        encrypter = upload.EncryptAnUploadable(uploadable, chunk_size=1)
        d = encrypter.read_encrypted(len(plaintext), False)
        ciphertext = self.successResultOf(d)
        self.assertEqual(
            list(map(len, ciphertext)),
            # Chunk size was specified as 1 above so we will get the whole
            # plaintext in one byte chunks.
            [1] * len(plaintext),
        )


# TODO:
#  upload with exactly 75 servers (shares_of_happiness)
#  have a download fail
@@ -1,54 +0,0 @@
import json

from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks

from eliot import log_call

from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper

from allmydata.web.logs import TokenAuthenticatedWebSocketServerProtocol


class TestStreamingLogs(unittest.TestCase):
    """
    Test websocket streaming of logs
    """

    def setUp(self):
        self.reactor = MemoryReactorClockResolver()
        self.pumper = create_pumper()
        self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
        return self.pumper.start()

    def tearDown(self):
        return self.pumper.stop()

    @inlineCallbacks
    def test_one_log(self):
        """
        write a single Eliot log and see it streamed via websocket
        """

        proto = yield self.agent.open(
            transport_config=u"ws://localhost:1234/ws",
            options={},
        )

        messages = []
        def got_message(msg, is_binary=False):
            messages.append(json.loads(msg))
        proto.on("message", got_message)

        @log_call(action_type=u"test:cli:some-exciting-action")
        def do_a_thing():
            pass

        do_a_thing()

        proto.transport.loseConnection()
        yield proto.is_closed

        self.assertEqual(len(messages), 2)
        self.assertEqual("started", messages[0]["action_status"])
        self.assertEqual("succeeded", messages[1]["action_status"])
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
import json
from os.path import join

@@ -213,7 +225,7 @@ class IntroducerRootTests(unittest.TestCase):
        resource = IntroducerRoot(introducer_node)
        response = json.loads(
            self.successResultOf(
-                render(resource, {"t": [b"json"]}),
+                render(resource, {b"t": [b"json"]}),
            ),
        )
        self.assertEqual(
@@ -1,5 +1,7 @@
"""
Tests for ``allmydata.web.logs``.
+
+Ported to Python 3.
"""

from __future__ import (

@@ -9,6 +11,19 @@ from __future__ import (
    division,
)

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+import json
+
+from twisted.trial import unittest
+from twisted.internet.defer import inlineCallbacks
+
+from eliot import log_call
+
+from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper
+
from testtools.matchers import (
    Equals,
)

@@ -37,6 +52,7 @@ from ..common import (

from ...web.logs import (
    create_log_resources,
+    TokenAuthenticatedWebSocketServerProtocol,
)

class StreamingEliotLogsTests(SyncTestCase):

@@ -57,3 +73,47 @@ class StreamingEliotLogsTests(SyncTestCase):
            self.client.get(b"http:///v1"),
            succeeded(has_response_code(Equals(OK))),
        )


class TestStreamingLogs(unittest.TestCase):
    """
    Test websocket streaming of logs
    """

    def setUp(self):
        self.reactor = MemoryReactorClockResolver()
        self.pumper = create_pumper()
        self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
        return self.pumper.start()

    def tearDown(self):
        return self.pumper.stop()

    @inlineCallbacks
    def test_one_log(self):
        """
        write a single Eliot log and see it streamed via websocket
        """

        proto = yield self.agent.open(
            transport_config=u"ws://localhost:1234/ws",
            options={},
        )

        messages = []
        def got_message(msg, is_binary=False):
            messages.append(json.loads(msg))
        proto.on("message", got_message)

        @log_call(action_type=u"test:cli:some-exciting-action")
        def do_a_thing():
            pass

        do_a_thing()

        proto.transport.loseConnection()
        yield proto.is_closed

        self.assertEqual(len(messages), 2)
        self.assertEqual("started", messages[0]["action_status"])
        self.assertEqual("succeeded", messages[1]["action_status"])
@@ -1,5 +1,7 @@
"""
Tests for ``allmydata.web.private``.
+
+Ported to Python 3.
"""

from __future__ import (

@@ -9,6 +11,10 @@ from __future__ import (
    division,
)

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
from testtools.matchers import (
    Equals,
)

@@ -56,6 +62,7 @@ class PrivacyTests(SyncTestCase):
        return super(PrivacyTests, self).setUp()

    def _authorization(self, scheme, value):
+        value = str(value, "utf-8")
        return Headers({
            u"authorization": [u"{} {}".format(scheme, value)],
        })

@@ -90,7 +97,7 @@ class PrivacyTests(SyncTestCase):
        self.assertThat(
            self.client.head(
                b"http:///foo/bar",
-                headers=self._authorization(SCHEME, u"foo bar"),
+                headers=self._authorization(str(SCHEME, "utf-8"), b"foo bar"),
            ),
            succeeded(has_response_code(Equals(UNAUTHORIZED))),
        )

@@ -103,7 +110,7 @@ class PrivacyTests(SyncTestCase):
        self.assertThat(
            self.client.head(
                b"http:///foo/bar",
-                headers=self._authorization(SCHEME, self.token),
+                headers=self._authorization(str(SCHEME, "utf-8"), self.token),
            ),
            # It's a made up URL so we don't get a 200, either, but a 404.
            succeeded(has_response_code(Equals(NOT_FOUND))),
@@ -1,7 +1,20 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
import time

-from six.moves.urllib.parse import quote
+from urllib.parse import (
+    quote,
+)

from bs4 import (
    BeautifulSoup,

@@ -76,7 +89,7 @@ class RenderSlashUri(unittest.TestCase):
        )
        self.assertEqual(
            response_body,
-            "Invalid capability",
+            b"Invalid capability",
        )


@@ -91,7 +104,7 @@ class RenderServiceRow(unittest.TestCase):
        ann = {"anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x",
               "permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3",
               }
-        srv = NativeStorageServer("server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
+        srv = NativeStorageServer(b"server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
        srv.get_connection_status = lambda: ConnectionStatus(False, "summary", {}, 0, 0)

        class FakeClient(_Client):

@@ -102,7 +115,7 @@ class RenderServiceRow(unittest.TestCase):
            tub_maker=None,
            node_config=EMPTY_CLIENT_CONFIG,
        )
-        self.storage_broker.test_add_server("test-srv", srv)
+        self.storage_broker.test_add_server(b"test-srv", srv)

        root = RootElement(FakeClient(), time.time)
        req = DummyRequest(b"")
@@ -4757,6 +4757,31 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
        op_url = self.webish_url + "/operations/134?t=status&output=JSON"
        yield self.assertHTTPError(op_url, 404, "unknown/expired handle '134'")

    @inlineCallbacks
    def test_uri_redirect(self):
        """URI redirects don't cause failure.

        Unit test reproducer for https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3590
        """
        def req(method, path, **kwargs):
            return treq.request(method, self.webish_url + path, persistent=False,
                                **kwargs)

        response = yield req("POST", "/uri?format=sdmf&t=mkdir")
        dircap = yield response.content()
        assert dircap.startswith('URI:DIR2:')
        dircap_uri = "/uri/?uri={}&t=json".format(urllib.quote(dircap))

        response = yield req(
            "GET",
            dircap_uri,
        )
        self.assertEqual(
            response.request.absoluteURI,
            self.webish_url + "/uri/{}?t=json".format(urllib.quote(dircap)))
        if response.code >= 400:
            raise Error(response.code, response=response.content())

    def test_incident(self):
        d = self.POST("/report_incident", details="eek")
        def _done(res):
@@ -114,6 +114,7 @@ PORTED_MODULES = [
    "allmydata.util.spans",
    "allmydata.util.statistics",
    "allmydata.util.time_format",
+    "allmydata.web.logs",
    "allmydata.webish",
]

@@ -187,6 +188,10 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_util",
    "allmydata.test.web.test_common",
    "allmydata.test.web.test_grid",
+    "allmydata.test.web.test_introducer",
+    "allmydata.test.web.test_logs",
+    "allmydata.test.web.test_private",
+    "allmydata.test.web.test_root",
    "allmydata.test.web.test_status",
    "allmydata.test.web.test_util",
    "allmydata.test.web.test_webish",
@@ -15,7 +15,18 @@ if PY2:

import time

+try:
+    from typing import (
+        Callable,
+        Any,
+    )
+except ImportError:
+    pass
+
from foolscap.api import eventually
+from eliot.twisted import (
+    inline_callbacks,
+)
from twisted.internet import defer, reactor, error
from twisted.python.failure import Failure

@@ -201,3 +212,22 @@ class WaitForDelayedCallsMixin(PollMixin):
        d.addErrback(log.err, "error while waiting for delayed calls")
        d.addBoth(lambda ign: res)
        return d


@inline_callbacks
def until(
        action,     # type: Callable[[], defer.Deferred[Any]]
        condition,  # type: Callable[[], bool]
):
    # type: (...) -> defer.Deferred[None]
    """
    Run a Deferred-returning function until a condition is true.

    :param action: The action to run.
    :param condition: The predicate signaling stop.

    :return: A Deferred that fires after the condition signals stop.
    """
    while True:
        yield action()
        if condition():
            break
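A minimal usage sketch of the new helper (the UntilTests added earlier in this commit exercise the same behaviors); this assumes a checkout that includes this commit's deferredutil:

    from twisted.internet import defer
    from allmydata.util.deferredutil import until

    calls = []

    def action():
        calls.append(None)
        return defer.succeed(None)

    # Runs action repeatedly; the condition is checked after each run.
    d = until(action, lambda: len(calls) == 3)
    # All Deferreds above fired synchronously, so d has already fired
    # here (with None) and calls == 3.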
@@ -26,10 +26,10 @@ class IntroducerRoot(MultiFormatResource):
        self.introducer_node = introducer_node
        self.introducer_service = introducer_node.getServiceNamed("introducer")
        # necessary as a root Resource
-        self.putChild("", self)
+        self.putChild(b"", self)
        static_dir = resource_filename("allmydata.web", "static")
        for filen in os.listdir(static_dir):
-            self.putChild(filen, static.File(os.path.join(static_dir, filen)))
+            self.putChild(filen.encode("utf-8"), static.File(os.path.join(static_dir, filen)))

    def _create_element(self):
        """

@@ -66,7 +66,7 @@ class IntroducerRoot(MultiFormatResource):
        announcement_summary[service_name] += 1
        res[u"announcement_summary"] = announcement_summary

-        return json.dumps(res, indent=1) + b"\n"
+        return (json.dumps(res, indent=1) + "\n").encode("utf-8")


class IntroducerRootElement(Element):
@@ -1,3 +1,6 @@
+"""
+Ported to Python 3.
+"""
from __future__ import (
    print_function,
    unicode_literals,

@@ -49,7 +52,11 @@ class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol):
        """
        # probably want a try/except around here? what do we do if
        # transmission fails or anything else bad happens?
-        self.sendMessage(json.dumps(message))
+        encoded = json.dumps(message)
+        if isinstance(encoded, str):
+            # On Python 3 dumps() returns Unicode...
+            encoded = encoded.encode("utf-8")
+        self.sendMessage(encoded)

    def onOpen(self):
        """
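The hunk above fixes a str-versus-bytes mismatch: on Python 3, json.dumps() returns text, while the websocket transport expects bytes. The same normalization in isolation (a sketch, not the tahoe code itself):

    import json

    def encode_for_websocket(message):
        encoded = json.dumps(message)
        if isinstance(encoded, str):
            # Python 3: dumps() produced text; the transport wants bytes.
            encoded = encoded.encode("utf-8")
        return encoded

    assert encode_for_websocket({"hello": 1}) == b'{"hello": 1}'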
@@ -1,6 +1,8 @@
+from future.utils import PY3
+
import os
import time
-import urllib
+from urllib.parse import quote as urlquote

from hyperlink import DecodedURL, URL
from pkg_resources import resource_filename

@@ -9,7 +11,7 @@ from twisted.web import (
    resource,
    static,
)
-from twisted.web.util import redirectTo
+from twisted.web.util import redirectTo, Redirect
from twisted.python.filepath import FilePath
from twisted.web.template import (
    Element,

@@ -81,7 +83,7 @@ class URIHandler(resource.Resource, object):
        # it seems Nevow was creating absolute URLs including
        # host/port whereas req.uri is absolute (but lacks host/port)
        redir_uri = URL.from_text(req.prePathURL().decode('utf8'))
-        redir_uri = redir_uri.child(urllib.quote(uri_arg).decode('utf8'))
+        redir_uri = redir_uri.child(urlquote(uri_arg))
        # add back all the query args that AREN'T "?uri="
        for k, values in req.args.items():
            if k != b"uri":

@@ -145,7 +147,7 @@ class URIHandler(resource.Resource, object):
        and creates and appropriate handler (depending on the kind of
        capability it was passed).
        """
-        # this is in case a URI like "/uri/?cap=<valid capability>" is
+        # this is in case a URI like "/uri/?uri=<valid capability>" is
        # passed -- we re-direct to the non-trailing-slash version so
        # that there is just one valid URI for "uri" resource.
        if not name:

@@ -153,7 +155,7 @@ class URIHandler(resource.Resource, object):
            u = u.replace(
                path=(s for s in u.path if s),  # remove empty segments
            )
-            return redirectTo(u.to_uri().to_text().encode('utf8'), req)
+            return Redirect(u.to_uri().to_text().encode('utf8'))
        try:
            node = self.client.create_node_from_uri(name)
            return directory.make_handler_for(node, self.client)

@@ -227,26 +229,26 @@ class Root(MultiFormatResource):
        self._client = client
        self._now_fn = now_fn

        # Children need to be bytes; for now just doing these to make specific
        # tests pass on Python 3, but eventually will do all them when this
        # module is ported to Python 3 (if not earlier).
        self.putChild(b"uri", URIHandler(client))
-        self.putChild("cap", URIHandler(client))
+        self.putChild(b"cap", URIHandler(client))

        # Handler for everything beneath "/private", an area of the resource
        # hierarchy which is only accessible with the private per-node API
        # auth token.
-        self.putChild("private", create_private_tree(client.get_auth_token))
+        self.putChild(b"private", create_private_tree(client.get_auth_token))

-        self.putChild("file", FileHandler(client))
-        self.putChild("named", FileHandler(client))
-        self.putChild("status", status.Status(client.get_history()))
-        self.putChild("statistics", status.Statistics(client.stats_provider))
+        self.putChild(b"file", FileHandler(client))
+        self.putChild(b"named", FileHandler(client))
+        self.putChild(b"status", status.Status(client.get_history()))
+        self.putChild(b"statistics", status.Statistics(client.stats_provider))
        static_dir = resource_filename("allmydata.web", "static")
        for filen in os.listdir(static_dir):
-            self.putChild(filen, static.File(os.path.join(static_dir, filen)))
+            child_path = filen
+            if PY3:
+                child_path = filen.encode("utf-8")
+            self.putChild(child_path, static.File(os.path.join(static_dir, filen)))

-        self.putChild("report_incident", IncidentReporter())
+        self.putChild(b"report_incident", IncidentReporter())

    @exception_to_child
    def getChild(self, path, request):
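All of the putChild changes above follow one rule: on Python 3, twisted.web matches child resources by bytes path segments, so registering a str key leaves the route unreachable. A small standalone illustration, separate from the diff:

    from twisted.web.resource import Resource
    from twisted.web.static import Data

    root = Resource()
    root.putChild(b"status", Data(b"ok", "text/plain"))
    # A str key "status" would never match the bytes segment b"status"
    # that twisted.web extracts from an incoming request URL.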