Merge pull request #1266 from tahoe-lafs/3965.better-max-size

Add more restrictive CBOR content limits to server

Fixes: ticket:3965
This commit is contained in:
Authored by Jean-Paul Calderone on 2023-03-10 11:13:15 -05:00; committed via GitHub
commit 374d2fc499
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 14 additions and 3 deletions

newsfragments/3965.minor — new (empty) normal file, 0 bytes
View File

View File

@ -606,7 +606,10 @@ class HTTPServer(object):
async def allocate_buckets(self, request, authorization, storage_index):
"""Allocate buckets."""
upload_secret = authorization[Secrets.UPLOAD]
info = await self._read_encoded(request, _SCHEMAS["allocate_buckets"])
# It's just a list of up to ~256 shares, shouldn't use many bytes.
info = await self._read_encoded(
request, _SCHEMAS["allocate_buckets"], max_size=8192
)
# We do NOT validate the upload secret for existing bucket uploads.
# Another upload may be happening in parallel, with a different upload
@ -773,7 +776,11 @@ class HTTPServer(object):
except KeyError:
raise _HTTPError(http.NOT_FOUND)
info = await self._read_encoded(request, _SCHEMAS["advise_corrupt_share"])
# The reason can be a string with explanation, so in theory it could be
# longish?
info = await self._read_encoded(
request, _SCHEMAS["advise_corrupt_share"], max_size=32768,
)
bucket.advise_corrupt_share(info["reason"].encode("utf-8"))
return b""
@ -872,7 +879,11 @@ class HTTPServer(object):
}:
raise _HTTPError(http.NOT_FOUND)
info = await self._read_encoded(request, _SCHEMAS["advise_corrupt_share"])
# The reason can be a string with explanation, so in theory it could be
# longish?
info = await self._read_encoded(
request, _SCHEMAS["advise_corrupt_share"], max_size=32768
)
self._storage_server.advise_corrupt_share(
b"mutable", storage_index, share_number, info["reason"].encode("utf-8")
)