diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 34a4e0875..fd5049104 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: steps: - # Get vcpython27 on Windows + Python 2.7, to build zfec + # Get vcpython27 on Windows + Python 2.7, to build netifaces # extension. See https://chocolatey.org/packages/vcpython27 and # https://github.com/crazy-max/ghaction-chocolatey - name: Install MSVC 9.0 for Python 2.7 [Windows] @@ -78,6 +78,15 @@ jobs: steps: + # Get vcpython27 for Windows + Python 2.7, to build netifaces + # extension. See https://chocolatey.org/packages/vcpython27 and + # https://github.com/crazy-max/ghaction-chocolatey + - name: Install MSVC 9.0 for Python 2.7 [Windows] + if: matrix.os == 'windows-latest' && matrix.python-version == '2.7' + uses: crazy-max/ghaction-chocolatey@v1 + with: + args: install vcpython27 + - name: Install Tor [Ubuntu] if: matrix.os == 'ubuntu-latest' run: sudo apt install tor @@ -92,12 +101,6 @@ jobs: with: args: install tor - - name: Install MSVC 9.0 for Python 2.7 [Windows] - if: matrix.os == 'windows-latest' && matrix.python-version == '2.7' - uses: crazy-max/ghaction-chocolatey@v1 - with: - args: install vcpython27 - - name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 @@ -141,7 +144,7 @@ jobs: steps: - # Get vcpython27 on Windows + Python 2.7, to build zfec + # Get vcpython27 for Windows + Python 2.7, to build netifaces # extension. See https://chocolatey.org/packages/vcpython27 and # https://github.com/crazy-max/ghaction-chocolatey - name: Install MSVC 9.0 for Python 2.7 [Windows] diff --git a/docs/INSTALL.rst b/docs/INSTALL.rst index ab9b5a743..3a724b790 100644 --- a/docs/INSTALL.rst +++ b/docs/INSTALL.rst @@ -39,9 +39,7 @@ If you are on Windows, please see :doc:`windows` for platform-specific instructions. If you are on a Mac, you can either follow these instructions, or use the -pre-packaged bundle described in :doc:`OS-X`. 
The Tahoe project hosts -pre-compiled "wheels" for all dependencies, so use the ``--find-links=`` -option described below to avoid needing a compiler. +pre-packaged bundle described in :doc:`OS-X`. Many Linux distributions include Tahoe-LAFS packages. Debian and Ubuntu users can ``apt-get install tahoe-lafs``. See `OSPackages`_ for other @@ -54,9 +52,14 @@ Preliminaries ============= If you don't use a pre-packaged copy of Tahoe, you can build it yourself. -You'll need Python2.7, pip, and virtualenv. On unix-like platforms, you will -need a C compiler, the Python development headers, and some libraries -(libffi-dev and libssl-dev). +You'll need Python2.7, pip, and virtualenv. +Tahoe-LAFS depends on some libraries which require a C compiler to build. +However, for many platforms, PyPI hosts already-built packages of libraries. + +If there is no already-built package for your platform, +you will need a C compiler, +the Python development headers, +and some libraries (libffi-dev and libssl-dev). On a modern Debian/Ubuntu-derived distribution, this command will get you everything you need:: @@ -64,8 +67,7 @@ everything you need:: apt-get install build-essential python-dev libffi-dev libssl-dev libyaml-dev python-virtualenv On OS-X, install pip and virtualenv as described below. If you want to -compile the dependencies yourself (instead of using ``--find-links`` to take -advantage of the pre-compiled ones we host), you'll also need to install +compile the dependencies yourself, you'll also need to install Xcode and its command-line tools. **Note** that Tahoe-LAFS depends on `openssl 1.1.1c` or greater. @@ -150,30 +152,24 @@ from PyPI with ``venv/bin/pip install tahoe-lafs``. After installation, run % virtualenv venv New python executable in ~/venv/bin/python2.7 Installing setuptools, pip, wheel...done. - + % venv/bin/pip install -U pip setuptools Downloading/unpacking pip from https://pypi.python.org/... ... 
Successfully installed pip setuptools - + % venv/bin/pip install tahoe-lafs Collecting tahoe-lafs ... Installing collected packages: ... Successfully installed ... - + % venv/bin/tahoe --version tahoe-lafs: 1.14.0 foolscap: ... - + % -On OS-X, instead of ``pip install tahoe-lafs``, use this command to take -advantage of the hosted pre-compiled wheels:: - - venv/bin/pip install --find-links=https://tahoe-lafs.org/deps tahoe-lafs - - Install From a Source Tarball ----------------------------- @@ -182,13 +178,13 @@ You can also install directly from the source tarball URL:: % virtualenv venv New python executable in ~/venv/bin/python2.7 Installing setuptools, pip, wheel...done. - + % venv/bin/pip install https://tahoe-lafs.org/downloads/tahoe-lafs-1.14.0.tar.bz2 Collecting https://tahoe-lafs.org/downloads/tahoe-lafs-1.14.0.tar.bz2 ... Installing collected packages: ... Successfully installed ... - + % venv/bin/tahoe --version tahoe-lafs: 1.14.0 ... @@ -213,16 +209,16 @@ with the ``--editable`` flag. You should also use the ``[test]`` extra to get the additional libraries needed to run the unit tests:: % git clone https://github.com/tahoe-lafs/tahoe-lafs.git - + % cd tahoe-lafs - + % virtualenv venv - + % venv/bin/pip install --editable .[test] Obtaining file::~/tahoe-lafs ... Successfully installed ... - + % venv/bin/tahoe --version tahoe-lafs: 1.14.0.post34.dev0 ... @@ -282,7 +278,7 @@ result in a "all tests passed" mesage:: test_missing_signature ... [OK] ... 
Ran 1186 tests in 423.179s - + PASSED (skips=7, expectedFailures=3, successes=1176) __________________________ summary ___________________________________ py27: commands succeeded diff --git a/docs/how_to_make_a_tahoe-lafs_release.org b/docs/how_to_make_a_tahoe-lafs_release.org deleted file mode 100644 index b3f2a84d7..000000000 --- a/docs/how_to_make_a_tahoe-lafs_release.org +++ /dev/null @@ -1,110 +0,0 @@ -How to Make a Tahoe-LAFS Release - -Any developer with push priveleges can do most of these steps, but a -"Release Maintainer" is required for some signing operations -- these -steps are marked with (Release Maintainer). Currently, the following -people are Release Maintainers: - - - Brian Warner (https://github.com/warner) - - -* select features/PRs for new release [0/2] - - [ ] made sure they are tagged/labeled - - [ ] merged all release PRs - -* basic quality checks [0/3] - - [ ] all travis CI checks pass - - [ ] all appveyor checks pass - - [ ] all buildbot workers pass their checks - -* freeze master branch [0/1] - - [ ] announced the freeze of the master branch on IRC (i.e. non-release PRs won't be merged until after release) - -* sync documentation [0/7] - - - [ ] NEWS.rst: (run "tox -e news") - - [ ] added final release name and date to top-most item in NEWS.rst - - [ ] updated relnotes.txt (change next, last versions; summarize NEWS) - - [ ] updated CREDITS - - [ ] updated docs/known_issues.rst - - [ ] docs/INSTALL.rst only points to current tahoe-lafs-X.Y.Z.tar.gz source code file - - [ ] updated https://tahoe-lafs.org/hacktahoelafs/ - -* sign + build the tag [0/8] - - - [ ] code passes all checks / tests (i.e. 
all CI is green) - - [ ] documentation is ready (see above) - - [ ] (Release Maintainer): git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z - - [ ] build code locally: - tox -e py27,codechecks,deprecations,docs,integration,upcoming-deprecations - - [ ] created tarballs (they'll be in dist/ for later comparison) - tox -e tarballs - - [ ] release version is reporting itself as intended version - ls dist/ - - [ ] 'git pull' doesn't pull anything - - [ ] pushed tag to trigger buildslaves - git push official master TAGNAME - - [ ] confirmed Dockerhub built successfully: - https://hub.docker.com/r/tahoelafs/base/builds/ - -* sign the release artifacts [0/8] - - - [ ] (Release Maintainer): pushed signed tag (should trigger Buildbot builders) - - [ ] Buildbot workers built all artifacts successfully - - [ ] downloaded upstream tarballs+wheels - - [ ] announce on IRC that master is unlocked - - [ ] compared upstream tarballs+wheels against local copies - - [ ] (Release Maintainer): signed each upstream artifacts with "gpg -ba -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A FILE" - - [ ] added to relnotes.txt: [0/3] - - [ ] prefix with SHA256 of tarballs - - [ ] release pubkey - - [ ] git revision hash - - [ ] GPG-signed the release email with release key (write to - relnotes.txt.asc) Ideally this is a Release Maintainer, but could - be any developer - -* publish release artifacts [0/9] - - - [ ] uploaded to PyPI via: twine upload dist/* - - [ ] uploaded *.asc to org ~source/downloads/ - - [ ] test install works properly: pip install tahoe-lafs - - [ ] copied the release tarballs and signatures to tahoe-lafs.org: ~source/downloads/ - - [ ] moved old release out of ~source/downloads (to downloads/old/?) 
- - [ ] ensured readthedocs.org updated - - [ ] uploaded wheels to https://tahoe-lafs.org/deps/ - - [ ] uploaded release to https://github.com/tahoe-lafs/tahoe-lafs/releases - -* check release downloads [0/] - - - [ ] test PyPI via: pip install tahoe-lafs - - [ ] https://github.com/tahoe-lafs/tahoe-lafs/releases - - [ ] https://tahoe-lafs.org/downloads/ - - [ ] https://tahoe-lafs.org/deps/ - -* document release in trac [0/] - - - [ ] closed the Milestone on the trac Roadmap - -* unfreeze master branch [0/] - - - [ ] announced on IRC that new PRs will be looked at/merged - -* announce new release [0/] - - - [ ] sent release email and relnotes.txt.asc to tahoe-announce@tahoe-lafs.org - - [ ] sent release email and relnotes.txt.asc to tahoe-dev@tahoe-lafs.org - - [ ] updated Wiki front page: version on download link, News column - - [ ] updated Wiki "Doc": parade of release notes (with rev of NEWS.rst) - - [ ] make an "announcement of new release" on freshmeat (XXX still a thing?) - - [ ] make an "announcement of new release" on launchpad - - [ ] tweeted as @tahoelafs - - [ ] emailed relnotes.txt.asc to below listed mailing-lists/organizations - - [ ] also announce release to (trimmed from previous version of this doc): - - twisted-python@twistedmatrix.com - - liberationtech@lists.stanford.edu - - lwn@lwn.net - - p2p-hackers@lists.zooko.com - - python-list@python.org - - http://listcultures.org/pipermail/p2presearch_listcultures.org/ - - cryptopp-users@googlegroups.com - - (others?) diff --git a/docs/release-checklist.rst b/docs/release-checklist.rst new file mode 100644 index 000000000..be32aea6c --- /dev/null +++ b/docs/release-checklist.rst @@ -0,0 +1,197 @@ + +================= +Release Checklist +================= + +These instructions were produced while making the 1.15.0 release. They +are based on the original instructions (in old revisions in the file +`docs/how_to_make_a_tahoe-lafs_release.org`). 
+ +Any contributor can do the first part of the release preparation. Only +certain contributors can perform other parts. These are the two main +sections of this checklist (and could be done by different people). + +A final section describes how to announce the release. + + +Any Contributor +--------------- + +Anyone who can create normal PRs should be able to complete this +portion of the release process. + + +Prepare for the Release +``````````````````````` + +The `master` branch should always be releasable. + +It may be worth asking (on IRC or mailing-list) if anything will be +merged imminently (for example, "I will prepare a release this coming +Tuesday if you want to get anything in"). + +- Create a ticket for the release in Trac +- Ticket number needed in next section + + +Create Branch and Apply Updates +``````````````````````````````` + +- Create a branch for release-candidates (e.g. `XXXX.release-1.15.0.rc0`) +- run `tox -e news` to produce a new NEWS.txt file (this does a commit) +- create the news for the release + - newsfragments/.minor + - commit it +- manually fix NEWS.txt + - proper title for latest release ("Release 1.15.0" instead of "Release ...post1432") + - double-check date (maybe release will be in the future) + - spot-check the release notes (these come from the newsfragments + files though so don't do heavy editing) + - commit these changes +- update "relnotes.txt" + - update all mentions of 1.14.0 -> 1.15.0 + - update "previous release" statement and date + - summarize major changes + - commit it +- update "CREDITS" + - are there any new contributors in this release? + - one way: git log release-1.14.0.. 
| grep Author | sort | uniq + - commit it +- update "docs/known_issues.rst" if appropriate +- update "docs/INSTALL.rst" references to the new release +- Push the branch to github +- Create a (draft) PR; this should trigger CI (note that github + doesn't let you create a PR without some changes on the branch so + running + committing the NEWS.txt file achieves that without changing + any code) +- Confirm CI runs successfully on all platforms + + +Create Release Candidate +```````````````````````` + +Before "officially" tagging any release, we will make a +release-candidate available. So there will be at least 1.15.0rc0 (for +example). If there are any problems, an rc1 or rc2 etc may also be +released. Anyone can sign these releases (ideally they'd be signed +"officially" as well, but it's better to get them out than to wait for +that). + +Typically expert users will be the ones testing release candidates and +they will need to evaluate which contributors' signatures they trust. + +- (all steps above are completed) +- sign the release + - git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.15.0rc0" tahoe-lafs-1.15.0rc0 + - (replace the key-id above with your own) +- build all code locally + - these should all pass: + - tox -e py27,codechecks,docs,integration + - these can fail (ideally they should not of course): + - tox -e deprecations,upcoming-deprecations +- build tarballs + - tox -e tarballs + - confirm it at least exists: + - ls dist/ | grep 1.15.0rc0 +- inspect and test the tarballs + - install each in a fresh virtualenv + - run `tahoe` command +- when satisfied, sign the tarballs: + - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl + - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.bz2 + - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.gz + - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.zip + + +Privileged Contributor 
+----------------------- + +Steps in this portion require special access to keys or +infrastructure. For example, **access to tahoe-lafs.org** to upload +binaries or edit HTML. + + +Hack Tahoe-LAFS +``````````````` + +Did anyone contribute a hack since the last release? If so, then +https://tahoe-lafs.org/hacktahoelafs/ needs to be updated. + + +Upload Artifacts +```````````````` + +Any release-candidate or actual release plus signature (.asc file) +need to be uploaded to https://tahoe-lafs.org in `~source/downloads` + +- secure-copy all release artifacts to the download area on the + tahoe-lafs.org host machine. `~source/downloads` on there maps to + https://tahoe-lafs.org/downloads/ on the Web. +- scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads +- the following developers have access to do this: + - exarkun + - meejah + - warner + +For the actual release, the tarball and signature files need to be +uploaded to PyPI as well. + +- how to do this? +- (original guide says only "twine upload dist/*") +- the following developers have access to do this: + - warner + - exarkun (partial?) + - meejah (partial?) + +Announcing the Release Candidate +```````````````````````````````` + +The release-candidate should be announced by posting to the +mailing-list (tahoe-dev@tahoe-lafs.org). For example: +https://tahoe-lafs.org/pipermail/tahoe-dev/2020-October/009995.html + + +Is The Release Done Yet? +```````````````````````` + +If anyone reports a problem with a release-candidate then a new +release-candidate should be made once a fix has been merged to +master. Repeat the above instructions with `rc1` or `rc2` or whatever +is appropriate. + +Once a release-candidate has marinated for some time then it can be +made into the actual release. + +XXX Write this section when doing 1.15.0 actual release + +(In general, this means dropping the "rcX" part of the release and the +tag, uploading those artifacts, uploading to PyPI, ... 
) + + + +Announcing the Release +---------------------- + + +mailing-lists +````````````` + +A new Tahoe release is traditionally announced on our mailing-list +(tahoe-dev@tahoe-lafs.org). The former version of these instructions +also announced the release on the following other lists: + +- tahoe-announce@tahoe-lafs.org +- twisted-python@twistedmatrix.com +- liberationtech@lists.stanford.edu +- lwn@lwn.net +- p2p-hackers@lists.zooko.com +- python-list@python.org +- http://listcultures.org/pipermail/p2presearch_listcultures.org/ +- cryptopp-users@googlegroups.com + + +wiki +```` + +Edit the "News" section of the front page of https://tahoe-lafs.org +with a link to the mailing-list archive of the announcement message. diff --git a/docs/windows.rst b/docs/windows.rst index 568e502bc..1f69ac743 100644 --- a/docs/windows.rst +++ b/docs/windows.rst @@ -33,7 +33,7 @@ You can use whatever name you like for the virtualenv, but example uses 3: Use the virtualenv's ``pip`` to install the latest release of Tahoe-LAFS into this virtualenv:: - PS C:\Users\me> venv\Scripts\pip install --find-links=https://tahoe-lafs.org/deps/ tahoe-lafs + PS C:\Users\me> venv\Scripts\pip install tahoe-lafs Collecting tahoe-lafs ... Installing collected packages: ... @@ -69,7 +69,7 @@ The ``pip install tahoe-lafs`` command above will install the latest release the following command (using pip from the virtualenv, from the root of your git checkout):: - $ venv\Scripts\pip install --find-links=https://tahoe-lafs.org/deps/ . + $ venv\Scripts\pip install . If you're planning to hack on the source code, you might want to add ``--editable`` so you won't have to re-install each time you make a change. @@ -77,12 +77,7 @@ If you're planning to hack on the source code, you might want to add Dependencies ------------ -Tahoe-LAFS depends upon several packages that use compiled C code -(such as zfec). This code must be built separately for each platform -(Windows, OS-X, and different flavors of Linux). 
- -Pre-compiled "wheels" of all Tahoe's dependencies are hosted on the -tahoe-lafs.org website in the ``deps/`` directory. The ``--find-links=`` -argument (used in the examples above) instructs ``pip`` to look at that URL -for dependencies. This should avoid the need for anything to be compiled -during the install. +Tahoe-LAFS depends upon several packages that use compiled C code (such as zfec). +This code must be built separately for each platform (Windows, OS-X, and different flavors of Linux). +Fortunately, this is now done by upstream packages for most platforms. +The result is that a C compiler is usually not required to install Tahoe-LAFS. diff --git a/integration/util.py b/integration/util.py index a64bcbf8e..9e8e74246 100644 --- a/integration/util.py +++ b/integration/util.py @@ -257,7 +257,12 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam def created(_): config_path = join(node_dir, 'tahoe.cfg') config = get_config(config_path) - set_config(config, 'node', 'log_gatherer.furl', flog_gatherer) + set_config( + config, + u'node', + u'log_gatherer.furl', + flog_gatherer.decode("utf-8"), + ) write_config(config_path, config) created_d.addCallback(created) diff --git a/newsfragments/1549.installation b/newsfragments/1549.installation new file mode 100644 index 000000000..cbb91cea5 --- /dev/null +++ b/newsfragments/1549.installation @@ -0,0 +1 @@ +Tahoe-LAFS now requires Twisted 19.10.0 or newer. As a result, it now has a transitive dependency on bcrypt. 
diff --git a/newsfragments/3477.minor b/newsfragments/3477.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3478.minor b/newsfragments/3478.minor new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/newsfragments/3478.minor @@ -0,0 +1 @@ + diff --git a/newsfragments/3497.installation b/newsfragments/3497.installation new file mode 100644 index 000000000..4a50be97e --- /dev/null +++ b/newsfragments/3497.installation @@ -0,0 +1 @@ +The Tahoe-LAFS project no longer commits to maintaining binary packages for all dependencies at https://tahoe-lafs.org/deps/. Please use PyPI instead. diff --git a/newsfragments/3502.minor b/newsfragments/3502.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3509.bugfix b/newsfragments/3509.bugfix new file mode 100644 index 000000000..4d633feab --- /dev/null +++ b/newsfragments/3509.bugfix @@ -0,0 +1 @@ +Fix regression that broke flogtool results on Python 2. \ No newline at end of file diff --git a/newsfragments/3510.bugfix b/newsfragments/3510.bugfix new file mode 100644 index 000000000..d4a2bd5dc --- /dev/null +++ b/newsfragments/3510.bugfix @@ -0,0 +1 @@ +Fix a logging regression on Python 2 involving unicode strings. \ No newline at end of file diff --git a/newsfragments/3513.minor b/newsfragments/3513.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3517.minor b/newsfragments/3517.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3518.removed b/newsfragments/3518.removed new file mode 100644 index 000000000..460af5142 --- /dev/null +++ b/newsfragments/3518.removed @@ -0,0 +1 @@ +Announcements delivered through the introducer system are no longer automatically annotated with copious information about the Tahoe-LAFS software version nor the versions of its dependencies. 
diff --git a/newsfragments/3537.minor b/newsfragments/3537.minor new file mode 100644 index 000000000..e69de29bb diff --git a/nix/overlays.nix b/nix/overlays.nix index 4ee63a412..2bf58575e 100644 --- a/nix/overlays.nix +++ b/nix/overlays.nix @@ -15,6 +15,9 @@ self: super: { # Need version of pyutil that supports Python 3. The version in 19.09 # is too old. pyutil = python-super.callPackage ./pyutil.nix { }; + + # Need a newer version of Twisted, too. + twisted = python-super.callPackage ./twisted.nix { }; }; }; } diff --git a/nix/twisted.nix b/nix/twisted.nix new file mode 100644 index 000000000..3c11e3c71 --- /dev/null +++ b/nix/twisted.nix @@ -0,0 +1,63 @@ +{ stdenv +, buildPythonPackage +, fetchPypi +, python +, zope_interface +, incremental +, automat +, constantly +, hyperlink +, pyhamcrest +, attrs +, pyopenssl +, service-identity +, setuptools +, idna +, bcrypt +}: +buildPythonPackage rec { + pname = "Twisted"; + version = "19.10.0"; + + src = fetchPypi { + inherit pname version; + extension = "tar.bz2"; + sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d"; + }; + + propagatedBuildInputs = [ zope_interface incremental automat constantly hyperlink pyhamcrest attrs setuptools bcrypt ]; + + passthru.extras.tls = [ pyopenssl service-identity idna ]; + + # Patch t.p._inotify to point to libc. Without this, + # twisted.python.runtime.platform.supportsINotify() == False + patchPhase = stdenv.lib.optionalString stdenv.isLinux '' + substituteInPlace src/twisted/python/_inotify.py --replace \ + "ctypes.util.find_library('c')" "'${stdenv.glibc.out}/lib/libc.so.6'" + ''; + + # Generate Twisted's plug-in cache. Twisted users must do it as well. See + # http://twistedmatrix.com/documents/current/core/howto/plugin.html#auto3 + # and http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=477103 for + # details. 
+ postFixup = '' + $out/bin/twistd --help > /dev/null + ''; + + checkPhase = '' + ${python.interpreter} -m unittest discover -s twisted/test + ''; + # Tests require network + doCheck = false; + + meta = with stdenv.lib; { + homepage = https://twistedmatrix.com/; + description = "Twisted, an event-driven networking engine written in Python"; + longDescription = '' + Twisted is an event-driven networking engine written in Python + and licensed under the MIT license. + ''; + license = licenses.mit; + maintainers = [ ]; + }; +} diff --git a/setup.py b/setup.py index c27681ea8..c26805684 100644 --- a/setup.py +++ b/setup.py @@ -98,7 +98,9 @@ install_requires = [ # `pip install tahoe-lafs[sftp]` would not install requirements # specified by Twisted[conch]. Since this would be the *whole point* of # an sftp extra in Tahoe-LAFS, there is no point in having one. - "Twisted[tls,conch] >= 18.4.0", + # * Twisted 19.10 introduces Site.getContentFile which we use to get + # temporary upload files placed into a per-node temporary directory. 
+ "Twisted[tls,conch] >= 19.10.0", "PyYAML >= 3.11", diff --git a/src/allmydata/client.py b/src/allmydata/client.py index a768ba354..7cfd2156f 100644 --- a/src/allmydata/client.py +++ b/src/allmydata/client.py @@ -33,6 +33,7 @@ from allmydata.introducer.client import IntroducerClient from allmydata.util import ( hashutil, base32, pollmixin, log, idlib, yamlutil, configutil, + fileutil, ) from allmydata.util.encodingutil import get_filesystem_encoding from allmydata.util.abbreviate import parse_abbreviated_size @@ -512,7 +513,6 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None): config.nickname, str(allmydata.__full_version__), str(_Client.OLDEST_SUPPORTED_VERSION), - list(node.get_app_versions()), partial(_sequencer, config), introducer_cache_filepath, ) @@ -1043,6 +1043,21 @@ class _Client(node.Node, pollmixin.PollMixin): def set_default_mutable_keysize(self, keysize): self._key_generator.set_default_keysize(keysize) + def _get_tempdir(self): + """ + Determine the path to the directory where temporary files for this node + should be written. + + :return bytes: The path which will exist and be a directory. 
+ """ + tempdir_config = self.config.get_config("node", "tempdir", "tmp") + if isinstance(tempdir_config, bytes): + tempdir_config = tempdir_config.decode('utf-8') + tempdir = self.config.get_config_path(tempdir_config) + if not os.path.exists(tempdir): + fileutil.make_dirs(tempdir) + return tempdir + def init_web(self, webport): self.log("init_web(webport=%s)", args=(webport,)) @@ -1050,7 +1065,13 @@ class _Client(node.Node, pollmixin.PollMixin): nodeurl_path = self.config.get_config_path("node.url") staticdir_config = self.config.get_config("node", "web.static", "public_html") staticdir = self.config.get_config_path(staticdir_config) - ws = WebishServer(self, webport, nodeurl_path, staticdir) + ws = WebishServer( + self, + webport, + self._get_tempdir(), + nodeurl_path, + staticdir, + ) ws.setServiceParent(self) def init_ftp_server(self): diff --git a/src/allmydata/introducer/client.py b/src/allmydata/introducer/client.py index 36adae474..0a6352317 100644 --- a/src/allmydata/introducer/client.py +++ b/src/allmydata/introducer/client.py @@ -24,7 +24,7 @@ class IntroducerClient(service.Service, Referenceable): def __init__(self, tub, introducer_furl, nickname, my_version, oldest_supported, - app_versions, sequencer, cache_filepath): + sequencer, cache_filepath): self._tub = tub self.introducer_furl = introducer_furl @@ -32,13 +32,12 @@ class IntroducerClient(service.Service, Referenceable): self._nickname = nickname self._my_version = my_version self._oldest_supported = oldest_supported - self._app_versions = app_versions self._sequencer = sequencer self._cache_filepath = cache_filepath self._my_subscriber_info = { "version": 0, "nickname": self._nickname, - "app-versions": self._app_versions, + "app-versions": [], "my-version": self._my_version, "oldest-supported": self._oldest_supported, } @@ -190,7 +189,7 @@ class IntroducerClient(service.Service, Referenceable): # "seqnum" and "nonce" will be populated with new values in # publish(), each time we make a change 
"nickname": self._nickname, - "app-versions": self._app_versions, + "app-versions": [], "my-version": self._my_version, "oldest-supported": self._oldest_supported, diff --git a/src/allmydata/mutable/layout.py b/src/allmydata/mutable/layout.py index bf9a0483b..ce51a8833 100644 --- a/src/allmydata/mutable/layout.py +++ b/src/allmydata/mutable/layout.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + # Omit dict so Python 3 changes don't leak into API callers on Python 2. + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 from past.utils import old_div import struct @@ -1744,7 +1756,7 @@ class MDMFSlotReadProxy(object): def _read(self, readvs, force_remote=False): - unsatisfiable = list(filter(lambda x: x[0] + x[1] > len(self._data), readvs)) + unsatisfiable = [x for x in readvs if x[0] + x[1] > len(self._data)] # TODO: It's entirely possible to tweak this so that it just # fulfills the requests that it can, and not demand that all # requests are satisfiable before running it. diff --git a/src/allmydata/mutable/repairer.py b/src/allmydata/mutable/repairer.py index 261ca9633..23af02203 100644 --- a/src/allmydata/mutable/repairer.py +++ b/src/allmydata/mutable/repairer.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer diff --git a/src/allmydata/mutable/retrieve.py b/src/allmydata/mutable/retrieve.py index a36a90809..894fb9776 100644 --- a/src/allmydata/mutable/retrieve.py +++ b/src/allmydata/mutable/retrieve.py @@ -1,4 +1,15 @@ -from past.builtins import unicode +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + # Don't import bytes and str, to prevent API leakage + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min # noqa: F401 import time @@ -749,9 +760,9 @@ class Retrieve(object): blockhashes = dict(enumerate(blockhashes)) self.log("the reader gave me the following blockhashes: %s" % \ - blockhashes.keys()) + list(blockhashes.keys())) self.log("the reader gave me the following sharehashes: %s" % \ - sharehashes.keys()) + list(sharehashes.keys())) bht = self._block_hash_trees[reader.shnum] if bht.needed_hashes(segnum, include_leaf=True): @@ -908,7 +919,7 @@ class Retrieve(object): def notify_server_corruption(self, server, shnum, reason): - if isinstance(reason, unicode): + if isinstance(reason, str): reason = reason.encode("utf-8") storage_server = server.get_storage_server() storage_server.advise_corrupt_share( diff --git a/src/allmydata/mutable/servermap.py b/src/allmydata/mutable/servermap.py index b985d4121..6d8e7806a 100644 --- a/src/allmydata/mutable/servermap.py +++ 
b/src/allmydata/mutable/servermap.py @@ -1,5 +1,15 @@ +""" +Ported to Python 3. +""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals +from future.utils import PY2 +if PY2: + # Doesn't import str to prevent API leakage on Python 2 + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 from past.builtins import unicode import sys, time, copy @@ -188,7 +198,7 @@ class ServerMap(object): def dump(self, out=sys.stdout): print("servermap:", file=out) - for ( (server, shnum), (verinfo, timestamp) ) in self._known_shares.items(): + for ( (server, shnum), (verinfo, timestamp) ) in list(self._known_shares.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo print("[%s]: sh#%d seq%d-%s %d-of-%d len%d" % @@ -226,7 +236,7 @@ class ServerMap(object): """Return a dict that maps versionid to sets of (shnum, server, timestamp) tuples.""" versionmap = DictOfSets() - for ( (server, shnum), (verinfo, timestamp) ) in self._known_shares.items(): + for ( (server, shnum), (verinfo, timestamp) ) in list(self._known_shares.items()): versionmap.add(verinfo, (shnum, server, timestamp)) return versionmap @@ -245,7 +255,7 @@ class ServerMap(object): (num_distinct_shares, k, N) tuples.""" versionmap = self.make_versionmap() all_shares = {} - for verinfo, shares in versionmap.items(): + for verinfo, shares in list(versionmap.items()): s = set() for (shnum, server, timestamp) in shares: s.add(shnum) @@ -271,7 +281,7 @@ class ServerMap(object): """Return a string describing which versions we know about.""" versionmap = self.make_versionmap() bits = [] - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): vstr = self.summarize_version(verinfo) shnums = set([shnum for (shnum, server, timestamp) in shares]) 
bits.append("%d*%s" % (len(shnums), vstr)) @@ -282,7 +292,7 @@ class ServerMap(object): recoverable.""" versionmap = self.make_versionmap() recoverable_versions = set() - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo shnums = set([shnum for (shnum, server, timestamp) in shares]) @@ -298,7 +308,7 @@ class ServerMap(object): versionmap = self.make_versionmap() unrecoverable_versions = set() - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo shnums = set([shnum for (shnum, server, timestamp) in shares]) @@ -332,7 +342,7 @@ class ServerMap(object): healths = {} # maps verinfo to (found,k) unrecoverable = set() highest_recoverable_seqnum = -1 - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo shnums = set([shnum for (shnum, server, timestamp) in shares]) @@ -667,7 +677,7 @@ class ServermapUpdater(object): ds = [] - for shnum,datav in datavs.items(): + for shnum,datav in list(datavs.items()): data = datav[0] reader = MDMFSlotReadProxy(ss, storage_index, diff --git a/src/allmydata/node.py b/src/allmydata/node.py index 9e7143fd4..0dcd900aa 100644 --- a/src/allmydata/node.py +++ b/src/allmydata/node.py @@ -19,7 +19,6 @@ import os.path import re import types import errno -import tempfile from base64 import b32decode, b32encode # On Python 2 this will be the backported package. 
@@ -28,16 +27,20 @@ import configparser from twisted.python import log as twlog from twisted.application import service from twisted.python.failure import Failure -from foolscap.api import Tub, app_versions +from foolscap.api import Tub + import foolscap.logging.log -from allmydata.version_checks import get_package_versions, get_package_versions_string + from allmydata.util import log from allmydata.util import fileutil, iputil -from allmydata.util.assertutil import _assert from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.util.encodingutil import get_filesystem_encoding, quote_output from allmydata.util import configutil +from . import ( + __full_version__, +) + def _common_valid_config(): return configutil.ValidConfiguration({ "connections": ( @@ -78,11 +81,6 @@ def _common_valid_config(): ), }) -# Add our application versions to the data that Foolscap's LogPublisher -# reports. -for thing, things_version in list(get_package_versions().items()): - app_versions.add_version(thing, things_version) - # group 1 will be addr (dotted quad string), group 3 if any will be portnum (string) ADDR_RE = re.compile("^([1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*)(:([1-9][0-9]*))?$") @@ -228,13 +226,6 @@ def config_from_string(basedir, portnumfile, config_str, _valid_config=None): return _Config(parser, portnumfile, basedir, fname) -def get_app_versions(): - """ - :returns: dict of versions important to Foolscap - """ - return dict(app_versions.versions) - - def _error_about_old_config_files(basedir, generated_files): """ If any old configuration files are detected, raise @@ -740,8 +731,6 @@ class Node(service.MultiService): self._i2p_provider = i2p_provider self._tor_provider = tor_provider - self.init_tempdir() - self.create_log_tub() self.logSource = "Node" self.setup_logging() @@ -759,7 +748,7 @@ class Node(service.MultiService): if self.control_tub is not None: self.control_tub.setServiceParent(self) - self.log("Node constructed. 
" + get_package_versions_string()) + self.log("Node constructed. " + __full_version__) iputil.increase_rlimits() def _is_tub_listening(self): @@ -768,25 +757,6 @@ class Node(service.MultiService): """ return len(self.tub.getListeners()) > 0 - def init_tempdir(self): - """ - Initialize/create a directory for temporary files. - """ - tempdir_config = self.config.get_config("node", "tempdir", "tmp") - if isinstance(tempdir_config, bytes): - tempdir_config = tempdir_config.decode('utf-8') - tempdir = self.config.get_config_path(tempdir_config) - if not os.path.exists(tempdir): - fileutil.make_dirs(tempdir) - tempfile.tempdir = tempdir - # this should cause twisted.web.http (which uses - # tempfile.TemporaryFile) to put large request bodies in the given - # directory. Without this, the default temp dir is usually /tmp/, - # which is frequently too small. - temp_fd, test_name = tempfile.mkstemp() - _assert(os.path.dirname(test_name) == tempdir, test_name, tempdir) - os.close(temp_fd) # avoid leak of unneeded fd - # pull this outside of Node's __init__ too, see: # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2948 def create_log_tub(self): diff --git a/src/allmydata/scripts/runner.py b/src/allmydata/scripts/runner.py index cfd22694b..3436a1b84 100644 --- a/src/allmydata/scripts/runner.py +++ b/src/allmydata/scripts/runner.py @@ -7,7 +7,6 @@ import six from twisted.python import usage from twisted.internet import defer, task, threads -from allmydata.version_checks import get_package_versions_string from allmydata.scripts.common import get_default_nodedir from allmydata.scripts import debug, create_node, cli, \ stats_gatherer, admin, tahoe_daemonize, tahoe_start, \ @@ -19,6 +18,10 @@ from allmydata.util.eliotutil import ( eliot_logging_service, ) +from .. import ( + __full_version__, +) + _default_nodedir = get_default_nodedir() NODEDIR_HELP = ("Specify which Tahoe node directory should be used. 
The " @@ -77,12 +80,10 @@ class Options(usage.Options): ] def opt_version(self): - print(get_package_versions_string(debug=True), file=self.stdout) + print(__full_version__, file=self.stdout) self.no_command_needed = True - def opt_version_and_path(self): - print(get_package_versions_string(show_paths=True, debug=True), file=self.stdout) - self.no_command_needed = True + opt_version_and_path = opt_version opt_eliot_destination = opt_eliot_destination opt_help_eliot_destinations = opt_help_eliot_destinations diff --git a/src/allmydata/test/cli/test_cli.py b/src/allmydata/test/cli/test_cli.py index 72e4fe69d..7f4f4140e 100644 --- a/src/allmydata/test/cli/test_cli.py +++ b/src/allmydata/test/cli/test_cli.py @@ -1266,7 +1266,7 @@ class Options(ReallyEqualMixin, unittest.TestCase): # "tahoe --version" dumps text to stdout and exits stdout = StringIO() self.failUnlessRaises(SystemExit, self.parse, ["--version"], stdout) - self.failUnlessIn(allmydata.__appname__ + ":", stdout.getvalue()) + self.failUnlessIn(allmydata.__full_version__, stdout.getvalue()) # but "tahoe SUBCOMMAND --version" should be rejected self.failUnlessRaises(usage.UsageError, self.parse, ["start", "--version"]) diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py index 1cf1d6428..a420dd3ba 100644 --- a/src/allmydata/test/common.py +++ b/src/allmydata/test/common.py @@ -110,7 +110,6 @@ class MemoryIntroducerClient(object): nickname = attr.ib() my_version = attr.ib() oldest_supported = attr.ib() - app_versions = attr.ib() sequencer = attr.ib() cache_filepath = attr.ib() @@ -1151,8 +1150,9 @@ class _TestCaseMixin(object): test (including setUp and tearDown messages). * trial-compatible mktemp method * unittest2-compatible assertRaises helper - * Automatic cleanup of tempfile.tempdir mutation (pervasive through the - Tahoe-LAFS test suite). 
+ * Automatic cleanup of tempfile.tempdir mutation (once pervasive through + the Tahoe-LAFS test suite, perhaps gone now but someone should verify + this). """ def setUp(self): # Restore the original temporary directory. Node ``init_tempdir`` diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py index 54095f15d..59ab807bb 100644 --- a/src/allmydata/test/no_network.py +++ b/src/allmydata/test/no_network.py @@ -357,7 +357,7 @@ class NoNetworkGrid(service.MultiService): to complete properly """ if self._setup_errors: - raise self._setup_errors[0].value + self._setup_errors[0].raiseException() @defer.inlineCallbacks def make_client(self, i, write_config=True): diff --git a/src/allmydata/test/test_client.py b/src/allmydata/test/test_client.py index 0f0648a4c..54c5be8e5 100644 --- a/src/allmydata/test/test_client.py +++ b/src/allmydata/test/test_client.py @@ -41,9 +41,6 @@ import allmydata.util.log from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir from allmydata.frontends.auth import NeedRootcapLookupScheme -from allmydata.version_checks import ( - get_package_versions_string, -) from allmydata import client from allmydata.storage_client import ( StorageClientConfig, @@ -621,8 +618,6 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase): self.failIfEqual(str(allmydata.__version__), "unknown") self.failUnless("." 
in str(allmydata.__full_version__), "non-numeric version in '%s'" % allmydata.__version__) - all_versions = get_package_versions_string() - self.failUnless(allmydata.__appname__ in all_versions) # also test stats stats = c.get_stats() self.failUnless("node.uptime" in stats) diff --git a/src/allmydata/test/test_introducer.py b/src/allmydata/test/test_introducer.py index d99e18c4a..d77b637e3 100644 --- a/src/allmydata/test/test_introducer.py +++ b/src/allmydata/test/test_introducer.py @@ -155,7 +155,7 @@ class ServiceMixin(object): class Introducer(ServiceMixin, AsyncTestCase): def test_create(self): ic = IntroducerClient(None, "introducer.furl", u"my_nickname", - "my_version", "oldest_version", {}, fakeseq, + "my_version", "oldest_version", fakeseq, FilePath(self.mktemp())) self.failUnless(isinstance(ic, IntroducerClient)) @@ -188,13 +188,13 @@ class Client(AsyncTestCase): def test_duplicate_receive_v2(self): ic1 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "ver23", "oldest_version", {}, fakeseq, + "ver23", "oldest_version", fakeseq, FilePath(self.mktemp())) # we use a second client just to create a different-looking # announcement ic2 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "ver24","oldest_version",{}, fakeseq, + "ver24","oldest_version",fakeseq, FilePath(self.mktemp())) announcements = [] def _received(key_s, ann): @@ -298,7 +298,7 @@ class Server(AsyncTestCase): i = IntroducerService() ic1 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "ver23", "oldest_version", {}, realseq, + "ver23", "oldest_version", realseq, FilePath(self.mktemp())) furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp" @@ -396,7 +396,7 @@ class Queue(SystemTestMixin, AsyncTestCase): tub2 = Tub() tub2.setServiceParent(self.parent) c = IntroducerClient(tub2, ifurl, - u"nickname", "version", "oldest", {}, fakeseq, + u"nickname", "version", "oldest", fakeseq, FilePath(self.mktemp())) furl1 = "pb://onug64tu@127.0.0.1:123/short" 
# base32("short") private_key, _ = ed25519.create_signing_keypair() @@ -477,7 +477,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase): c = IntroducerClient(tub, self.introducer_furl, NICKNAME % str(i), "version", "oldest", - {"component": "component-v1"}, fakeseq, + fakeseq, FilePath(self.mktemp())) received_announcements[c] = {} def got(key_s_or_tubid, ann, announcements): @@ -737,9 +737,8 @@ class ClientInfo(AsyncTestCase): def test_client_v2(self): introducer = IntroducerService() tub = introducer_furl = None - app_versions = {"whizzy": "fizzy"} client_v2 = IntroducerClient(tub, introducer_furl, NICKNAME % u"v2", - "my_version", "oldest", app_versions, + "my_version", "oldest", fakeseq, FilePath(self.mktemp())) #furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum" #ann_s = make_ann_t(client_v2, furl1, None, 10) @@ -751,7 +750,6 @@ class ClientInfo(AsyncTestCase): self.failUnlessEqual(len(subs), 1) s0 = subs[0] self.failUnlessEqual(s0.service_name, "storage") - self.failUnlessEqual(s0.app_versions, app_versions) self.failUnlessEqual(s0.nickname, NICKNAME % u"v2") self.failUnlessEqual(s0.version, "my_version") @@ -760,9 +758,8 @@ class Announcements(AsyncTestCase): def test_client_v2_signed(self): introducer = IntroducerService() tub = introducer_furl = None - app_versions = {"whizzy": "fizzy"} client_v2 = IntroducerClient(tub, introducer_furl, u"nick-v2", - "my_version", "oldest", app_versions, + "my_version", "oldest", fakeseq, FilePath(self.mktemp())) furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum" @@ -776,7 +773,6 @@ class Announcements(AsyncTestCase): self.failUnlessEqual(len(a), 1) self.assertThat(a[0].canary, Is(canary0)) self.failUnlessEqual(a[0].index, ("storage", public_key_str)) - self.failUnlessEqual(a[0].announcement["app-versions"], app_versions) self.failUnlessEqual(a[0].nickname, u"nick-v2") self.failUnlessEqual(a[0].service_name, "storage") self.failUnlessEqual(a[0].version, "my_version") @@ -854,7 +850,7 @@ 
class Announcements(AsyncTestCase): # test loading yield flushEventualQueue() ic2 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "my_version", "oldest_version", {}, fakeseq, + "my_version", "oldest_version", fakeseq, ic._cache_filepath) announcements = {} def got(key_s, ann): @@ -954,7 +950,7 @@ class NonV1Server(SystemTestMixin, AsyncTestCase): tub.setServiceParent(self.parent) listenOnUnused(tub) c = IntroducerClient(tub, self.introducer_furl, - u"nickname-client", "version", "oldest", {}, + u"nickname-client", "version", "oldest", fakeseq, FilePath(self.mktemp())) announcements = {} def got(key_s, ann): @@ -1027,7 +1023,6 @@ class Signatures(SyncTestCase): u"fake_nick", "0.0.0", "1.2.3", - {}, (0, u"i am a nonce"), "invalid", ) diff --git a/src/allmydata/test/test_log.py b/src/allmydata/test/test_log.py index eecbda9e3..bf079aaeb 100644 --- a/src/allmydata/test/test_log.py +++ b/src/allmydata/test/test_log.py @@ -9,7 +9,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from future.utils import PY2 +from future.utils import PY2, native_str if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 @@ -154,3 +154,17 @@ class Log(unittest.TestCase): obj.log("four") self.assertEqual([m[2] for m in self.messages], ["grand", "par1", "par2", "msg1", "msg1"]) + + def test_native_string_keys(self): + """Keyword argument keys are all native strings.""" + class LoggingObject17(tahoe_log.PrefixingLogMixin): + pass + + obj = LoggingObject17() + # Native string by default: + obj.log(hello="world") + # Will be Unicode on Python 2: + obj.log(**{"my": "message"}) + for message in self.messages: + for k in message[-1].keys(): + self.assertIsInstance(k, native_str) diff --git a/src/allmydata/test/test_runner.py b/src/allmydata/test/test_runner.py index d7fa08a0c..7d614d486 100644 --- 
a/src/allmydata/test/test_runner.py +++ b/src/allmydata/test/test_runner.py @@ -12,7 +12,6 @@ from twisted.internet import reactor from twisted.python import usage from twisted.internet.defer import ( inlineCallbacks, - returnValue, DeferredList, ) from twisted.python.filepath import FilePath @@ -20,12 +19,9 @@ from twisted.python.runtime import ( platform, ) from allmydata.util import fileutil, pollmixin -from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output, \ - get_filesystem_encoding +from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output from allmydata.test import common_util -from allmydata.version_checks import normalized_version import allmydata -from allmydata import __appname__ from .common_util import parse_cli, run_cli from .cli_node_api import ( CLINodeAPI, @@ -58,17 +54,6 @@ rootdir = get_root_from_file(srcfile) class RunBinTahoeMixin(object): - - @inlineCallbacks - def find_import_location(self): - res = yield self.run_bintahoe(["--version-and-path"]) - out, err, rc_or_sig = res - self.assertEqual(rc_or_sig, 0, res) - lines = out.splitlines() - tahoe_pieces = lines[0].split() - self.assertEqual(tahoe_pieces[0], "%s:" % (__appname__,), (tahoe_pieces, res)) - returnValue(tahoe_pieces[-1].strip("()")) - def run_bintahoe(self, args, stdin=None, python_options=[], env=None): command = sys.executable argv = python_options + ["-m", "allmydata.scripts.runner"] + args @@ -86,64 +71,6 @@ class RunBinTahoeMixin(object): class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin): - @inlineCallbacks - def test_the_right_code(self): - # running "tahoe" in a subprocess should find the same code that - # holds this test file, else something is weird - test_path = os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile)))) - bintahoe_import_path = yield self.find_import_location() - - same = (bintahoe_import_path == test_path) - if not same: - msg = ("My tests and my 'tahoe' executable are 
using different paths.\n" - "tahoe: %r\n" - "tests: %r\n" - "( according to the test source filename %r)\n" % - (bintahoe_import_path, test_path, srcfile)) - - if (not isinstance(rootdir, unicode) and - rootdir.decode(get_filesystem_encoding(), 'replace') != rootdir): - msg += ("However, this may be a false alarm because the import path\n" - "is not representable in the filesystem encoding.") - raise unittest.SkipTest(msg) - else: - msg += "Please run the tests in a virtualenv that includes both the Tahoe-LAFS library and the 'tahoe' executable." - self.fail(msg) - - def test_path(self): - d = self.run_bintahoe(["--version-and-path"]) - def _cb(res): - out, err, rc_or_sig = res - self.failUnlessEqual(rc_or_sig, 0, str(res)) - - # Fail unless the __appname__ package is *this* version *and* - # was loaded from *this* source directory. - - required_verstr = str(allmydata.__version__) - - self.failIfEqual(required_verstr, "unknown", - "We don't know our version, because this distribution didn't come " - "with a _version.py and 'setup.py update_version' hasn't been run.") - - srcdir = os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile)))) - info = repr((res, allmydata.__appname__, required_verstr, srcdir)) - - appverpath = out.split(')')[0] - (appverfull, path) = appverpath.split('] (') - (appver, comment) = appverfull.split(' [') - (branch, full_version) = comment.split(': ') - (app, ver) = appver.split(': ') - - self.failUnlessEqual(app, allmydata.__appname__, info) - norm_ver = normalized_version(ver) - norm_required = normalized_version(required_verstr) - self.failUnlessEqual(norm_ver, norm_required, info) - self.failUnlessEqual(path, srcdir, info) - self.failUnlessEqual(branch, allmydata.branch) - self.failUnlessEqual(full_version, allmydata.full_version) - d.addCallback(_cb) - return d - def test_unicode_arguments_and_output(self): tricky = u"\u2621" try: @@ -165,8 +92,8 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase, 
RunBinTahoeMixin): d = self.run_bintahoe(["--version"], python_options=["-t"]) def _cb(res): out, err, rc_or_sig = res - self.failUnlessEqual(rc_or_sig, 0, str(res)) - self.failUnless(out.startswith(allmydata.__appname__+':'), str(res)) + self.assertEqual(rc_or_sig, 0, str(res)) + self.assertTrue(out.startswith(allmydata.__appname__ + '/'), str(res)) d.addCallback(_cb) return d diff --git a/src/allmydata/test/test_version.py b/src/allmydata/test/test_version.py deleted file mode 100644 index f5f92ef9b..000000000 --- a/src/allmydata/test/test_version.py +++ /dev/null @@ -1,275 +0,0 @@ -""" -Tests for allmydata.util.verlib and allmydata.version_checks. - -Ported to Python 3. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import sys -import pkg_resources -from operator import ( - setitem, -) -from twisted.trial import unittest - -from allmydata.version_checks import ( - _cross_check as cross_check, - _extract_openssl_version as extract_openssl_version, - _get_package_versions_and_locations as get_package_versions_and_locations, -) -from allmydata.util.verlib import NormalizedVersion as V, \ - IrrationalVersionError, \ - suggest_normalized_version as suggest - - -class MockSSL(object): - SSLEAY_VERSION = 0 - SSLEAY_CFLAGS = 2 - - def __init__(self, version, compiled_without_heartbeats=False): - self.opts = { - self.SSLEAY_VERSION: version, - self.SSLEAY_CFLAGS: compiled_without_heartbeats and 'compiler: gcc -DOPENSSL_NO_HEARTBEATS' - or 'compiler: gcc', - } - - def SSLeay_version(self, which): - return self.opts[which] - - -class CheckRequirement(unittest.TestCase): - def test_packages_from_pkg_resources(self): - if hasattr(sys, 'frozen'): - raise 
unittest.SkipTest("This test doesn't apply to frozen builds.") - - class MockPackage(object): - def __init__(self, project_name, version, location): - self.project_name = project_name - self.version = version - self.location = location - - def call_pkg_resources_require(*args): - return [MockPackage("Foo", "1.0", "/path")] - self.patch(pkg_resources, 'require', call_pkg_resources_require) - - (packages, errors) = get_package_versions_and_locations() - self.failUnlessIn(("foo", ("1.0", "/path", "according to pkg_resources")), packages) - self.failIfEqual(errors, []) - self.failUnlessEqual([e for e in errors if "was not found by pkg_resources" not in e], []) - - def test_cross_check_unparseable_versions(self): - # The bug in #1355 is triggered when a version string from either pkg_resources or import - # is not parseable at all by normalized_version. - - res = cross_check({"foo": ("unparseable", "")}, [("foo", ("1.0", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "")}, [("foo", ("unparseable", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("unparseable", "")}, [("foo", ("unparseable", "", None))]) - self.failUnlessEqual(res, []) - - def test_cross_check(self): - res = cross_check({}, []) - self.failUnlessEqual(res, []) - - res = cross_check({}, [("tahoe-lafs", ("1.0", "", "blah"))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("unparseable", "")}, []) - self.failUnlessEqual(res, []) - - res = cross_check({"argparse": ("unparseable", "")}, []) - self.failUnlessEqual(res, []) - - res = cross_check({}, [("foo", ("unparseable", "", None))]) - self.failUnlessEqual(len(res), 1) - self.assertTrue(("version 'unparseable'" in res[0]) or ("version u'unparseable'" in res[0])) - self.failUnlessIn("was not found by pkg_resources", res[0]) - - res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere", "distribute"))]) - self.failUnlessEqual(res, []) - - res = 
cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere", None))]) - self.failUnlessEqual(len(res), 1) - self.failUnlessIn("location mismatch", res[0]) - - res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere_different", None))]) - self.failUnlessEqual(len(res), 1) - self.failUnlessIn("location mismatch", res[0]) - - res = cross_check({"zope.interface": ("1.0", "")}, [("zope.interface", ("unknown", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"zope.interface": ("unknown", "")}, [("zope.interface", ("unknown", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "")}, [("foo", ("unknown", "", None))]) - self.failUnlessEqual(len(res), 1) - self.failUnlessIn("could not find a version number", res[0]) - - res = cross_check({"foo": ("unknown", "")}, [("foo", ("unknown", "", None))]) - self.failUnlessEqual(res, []) - - # When pkg_resources and import both find a package, there is only a warning if both - # the version and the path fail to match. 
- - res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("2.0", "/somewhere", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("1.0", "/somewhere_different", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0-r123", "/somewhere")}, [("foo", ("1.0.post123", "/somewhere_different", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("2.0", "/somewhere_different", None))]) - self.failUnlessEqual(len(res), 1) - self.assertTrue(("but version '2.0'" in res[0]) or ("but version u'2.0'" in res[0])) - - def test_extract_openssl_version(self): - self.failUnlessEqual(extract_openssl_version(MockSSL("")), - ("", None, None)) - self.failUnlessEqual(extract_openssl_version(MockSSL("NotOpenSSL a.b.c foo")), - ("NotOpenSSL", None, "a.b.c foo")) - self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL a.b.c")), - ("a.b.c", None, None)) - self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL 1.0.1e 11 Feb 2013")), - ("1.0.1e", None, "11 Feb 2013")) - self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL 1.0.1e 11 Feb 2013", compiled_without_heartbeats=True)), - ("1.0.1e", None, "11 Feb 2013, no heartbeats")) - - -# based on https://bitbucket.org/tarek/distutilsversion/src/17df9a7d96ef/test_verlib.py - -class VersionTestCase(unittest.TestCase): - versions = ((V('1.0'), '1.0'), - (V('1.1'), '1.1'), - (V('1.2.3'), '1.2.3'), - (V('1.2'), '1.2'), - (V('1.2.3a4'), '1.2.3a4'), - (V('1.2c4'), '1.2c4'), - (V('1.2.3.4'), '1.2.3.4'), - (V('1.2.3.4.0b3'), '1.2.3.4b3'), - (V('1.2.0.0.0'), '1.2'), - (V('1.0.dev345'), '1.0.dev345'), - (V('1.0.post456.dev623'), '1.0.post456.dev623')) - - def test_basic_versions(self): - for v, s in self.versions: - self.failUnlessEqual(str(v), s) - - def test_from_parts(self): - for v, s in self.versions: - parts = v.parts - v2 = V.from_parts(*parts) - self.failUnlessEqual(v, v2) - 
self.failUnlessEqual(str(v), str(v2)) - - def test_irrational_versions(self): - irrational = ('1', '1.2a', '1.2.3b', '1.02', '1.2a03', - '1.2a3.04', '1.2.dev.2', '1.2dev', '1.2.dev', - '1.2.dev2.post2', '1.2.post2.dev3.post4') - - for s in irrational: - self.failUnlessRaises(IrrationalVersionError, V, s) - - def test_comparison(self): - self.failUnlessRaises(TypeError, lambda: V('1.2.0') == '1.2') - - self.failUnlessEqual(V('1.2.0'), V('1.2')) - self.failIfEqual(V('1.2.0'), V('1.2.3')) - self.failUnless(V('1.2.0') < V('1.2.3')) - self.failUnless(V('1.0') > V('1.0b2')) - self.failUnless(V('1.0') > V('1.0c2') > V('1.0c1') > V('1.0b2') > V('1.0b1') - > V('1.0a2') > V('1.0a1')) - self.failUnless(V('1.0.0') > V('1.0.0c2') > V('1.0.0c1') > V('1.0.0b2') > V('1.0.0b1') - > V('1.0.0a2') > V('1.0.0a1')) - - self.failUnless(V('1.0') < V('1.0.post456.dev623')) - self.failUnless(V('1.0.post456.dev623') < V('1.0.post456') < V('1.0.post1234')) - - self.failUnless(V('1.0a1') - < V('1.0a2.dev456') - < V('1.0a2') - < V('1.0a2.1.dev456') # e.g. 
need to do a quick post release on 1.0a2 - < V('1.0a2.1') - < V('1.0b1.dev456') - < V('1.0b2') - < V('1.0c1') - < V('1.0c2.dev456') - < V('1.0c2') - < V('1.0.dev7') - < V('1.0.dev18') - < V('1.0.dev456') - < V('1.0.dev1234') - < V('1.0') - < V('1.0.post456.dev623') # development version of a post release - < V('1.0.post456')) - - def test_suggest_normalized_version(self): - self.failUnlessEqual(suggest('1.0'), '1.0') - self.failUnlessEqual(suggest('1.0-alpha1'), '1.0a1') - self.failUnlessEqual(suggest('1.0c2'), '1.0c2') - self.failUnlessEqual(suggest('walla walla washington'), None) - self.failUnlessEqual(suggest('2.4c1'), '2.4c1') - - # from setuptools - self.failUnlessEqual(suggest('0.4a1.r10'), '0.4a1.post10') - self.failUnlessEqual(suggest('0.7a1dev-r66608'), '0.7a1.dev66608') - self.failUnlessEqual(suggest('0.6a9.dev-r41475'), '0.6a9.dev41475') - self.failUnlessEqual(suggest('2.4preview1'), '2.4c1') - self.failUnlessEqual(suggest('2.4pre1') , '2.4c1') - self.failUnlessEqual(suggest('2.1-rc2'), '2.1c2') - - # from pypi - self.failUnlessEqual(suggest('0.1dev'), '0.1.dev0') - self.failUnlessEqual(suggest('0.1.dev'), '0.1.dev0') - - # we want to be able to parse Twisted - # development versions are like post releases in Twisted - self.failUnlessEqual(suggest('9.0.0+r2363'), '9.0.0.post2363') - - # pre-releases are using markers like "pre1" - self.failUnlessEqual(suggest('9.0.0pre1'), '9.0.0c1') - - # we want to be able to parse Tcl-TK - # they use "p1" "p2" for post releases - self.failUnlessEqual(suggest('1.4p1'), '1.4.post1') - - # from darcsver - self.failUnlessEqual(suggest('1.8.1-r4956'), '1.8.1.post4956') - - # zetuptoolz - self.failUnlessEqual(suggest('0.6c16dev3'), '0.6c16.dev3') - - -class T(unittest.TestCase): - def test_report_import_error(self): - """ - get_package_versions_and_locations reports a dependency if a dependency - cannot be imported. - """ - # Make sure we don't leave the system in a bad state. 
- self.addCleanup( - lambda foolscap=sys.modules["foolscap"]: setitem( - sys.modules, - "foolscap", - foolscap, - ), - ) - # Make it look like Foolscap isn't installed. - sys.modules["foolscap"] = None - vers_and_locs, errors = get_package_versions_and_locations() - - foolscap_stuffs = [stuff for (pkg, stuff) in vers_and_locs if pkg == 'foolscap'] - self.failUnlessEqual(len(foolscap_stuffs), 1) - self.failUnless([e for e in errors if "\'foolscap\' could not be imported" in e]) diff --git a/src/allmydata/test/web/test_introducer.py b/src/allmydata/test/web/test_introducer.py index bf6ef6a4b..929fba507 100644 --- a/src/allmydata/test/web/test_introducer.py +++ b/src/allmydata/test/web/test_introducer.py @@ -127,7 +127,7 @@ class IntroducerWeb(unittest.TestCase): assert_soup_has_text( self, soup, - u"%s: %s" % (allmydata.__appname__, allmydata.__version__), + allmydata.__full_version__, ) assert_soup_has_text(self, soup, u"no peers!") assert_soup_has_text(self, soup, u"subscribers!") diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index aa6d44ea4..326569a26 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -6,6 +6,9 @@ import treq from bs4 import BeautifulSoup +from twisted.python.filepath import ( + FilePath, +) from twisted.application import service from twisted.internet import defer from twisted.internet.defer import inlineCallbacks, returnValue @@ -316,8 +319,16 @@ class WebMixin(TimezoneMixin): self.staticdir = self.mktemp() self.clock = Clock() self.fakeTime = 86460 # 1d 0h 1m 0s - self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir, - clock=self.clock, now_fn=lambda:self.fakeTime) + tempdir = FilePath(self.mktemp()) + tempdir.makedirs() + self.ws = webish.WebishServer( + self.s, + "0", + tempdir=tempdir.path, + staticdir=self.staticdir, + clock=self.clock, + now_fn=lambda:self.fakeTime, + ) self.ws.setServiceParent(self.s) self.webish_port = self.ws.getPortnum() 
self.webish_url = self.ws.getURL() diff --git a/src/allmydata/test/web/test_webish.py b/src/allmydata/test/web/test_webish.py index 1e659812f..e680acd04 100644 --- a/src/allmydata/test/web/test_webish.py +++ b/src/allmydata/test/web/test_webish.py @@ -5,6 +5,19 @@ Tests for ``allmydata.webish``. from uuid import ( uuid4, ) +from errno import ( + EACCES, +) +from io import ( + BytesIO, +) + +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + integers, +) from testtools.matchers import ( AfterPreprocessing, @@ -12,8 +25,13 @@ from testtools.matchers import ( Equals, MatchesAll, Not, + IsInstance, + HasLength, ) +from twisted.python.runtime import ( + platform, +) from twisted.python.filepath import ( FilePath, ) @@ -30,7 +48,7 @@ from ..common import ( from ...webish import ( TahoeLAFSRequest, - tahoe_lafs_site, + TahoeLAFSSite, ) @@ -96,7 +114,7 @@ class TahoeLAFSRequestTests(SyncTestCase): class TahoeLAFSSiteTests(SyncTestCase): """ - Tests for the ``Site`` created by ``tahoe_lafs_site``. + Tests for ``TahoeLAFSSite``. """ def _test_censoring(self, path, censored): """ @@ -112,7 +130,7 @@ class TahoeLAFSSiteTests(SyncTestCase): """ logPath = self.mktemp() - site = tahoe_lafs_site(Resource(), logPath=logPath) + site = TahoeLAFSSite(self.mktemp(), Resource(), logPath=logPath) site.startFactory() channel = DummyChannel() @@ -170,6 +188,106 @@ class TahoeLAFSSiteTests(SyncTestCase): b"/uri?uri=[CENSORED]", ) + def _create_request(self, tempdir): + """ + Create and return a new ``TahoeLAFSRequest`` hooked up to a + ``TahoeLAFSSite``. + + :param bytes tempdir: The temporary directory to give to the site. + + :return TahoeLAFSRequest: The new request instance. 
+ site = TahoeLAFSSite(tempdir.path, Resource(), logPath=self.mktemp()) + site.startFactory() + + channel = DummyChannel() + channel.site = site + request = TahoeLAFSRequest(channel) + return request + + @given(integers(min_value=0, max_value=1024 * 1024 - 1)) + def test_small_content(self, request_body_size): + """ + A request body smaller than 1 MiB is kept in memory. + """ + tempdir = FilePath(self.mktemp()) + request = self._create_request(tempdir) + request.gotLength(request_body_size) + self.assertThat( + request.content, + IsInstance(BytesIO), + ) + + def _large_request_test(self, request_body_size): + """ + Assert that when a request with a body of the given size is received + its content is written to the directory the ``TahoeLAFSSite`` is + configured with. + """ + tempdir = FilePath(self.mktemp()) + tempdir.makedirs() + request = self._create_request(tempdir) + + # So. Bad news. The temporary file for the uploaded content is + # unnamed (and this isn't even necessarily a bad thing since it is how + # you get automatic on-process-exit cleanup behavior on POSIX). It's + # not visible by inspecting the filesystem. It has no name we can + # discover. Then how do we verify it is written to the right place? + # The question itself is meaningless if we try to be too precise. It + # *has* no filesystem location. However, it is still stored *on* some + # filesystem. We still want to make sure it is on the filesystem we + # specified because otherwise it might be on a filesystem that's too + # small or undesirable in some other way. + # + # I don't know of any way to ask a file descriptor which filesystem + # it's on, either, though. It might be the case that the [f]statvfs() + # result could be compared somehow to infer the filesystem but + # ... it's not clear what the failure modes might be there, across + # different filesystems and runtime environments.
+ # + # Another approach is to make the temp directory unwriteable and + # observe the failure when an attempt is made to create a file there. + # This is hardly a lovely solution but at least it's kind of simple. + # + # It would be nice if it worked consistently cross-platform but on + # Windows os.chmod is more or less broken. + if platform.isWindows(): + request.gotLength(request_body_size) + self.assertThat( + tempdir.children(), + HasLength(1), + ) + else: + tempdir.chmod(0o550) + with self.assertRaises(OSError) as ctx: + request.gotLength(request_body_size) + raise Exception( + "OSError not raised, instead tempdir.children() = {}".format( + tempdir.children(), + ), + ) + + self.assertThat( + ctx.exception.errno, + Equals(EACCES), + ) + + def test_unknown_request_size(self): + """ + A request body with an unknown size is written to a file in the temporary + directory passed to ``TahoeLAFSSite``. + """ + self._large_request_test(None) + + @given(integers(min_value=1024 * 1024)) + def test_large_request(self, request_body_size): + """ + A request body of 1 MiB or more is written to a file in the temporary + directory passed to ``TahoeLAFSSite``. 
+ """ + self._large_request_test(request_body_size) + + def param(name, value): return u"; {}={}".format(name, value) diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index 7afefceed..f972fa0a1 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -56,7 +56,11 @@ PORTED_MODULES = [ "allmydata.mutable.checker", "allmydata.mutable.common", "allmydata.mutable.filenode", + "allmydata.mutable.layout", "allmydata.mutable.publish", + "allmydata.mutable.repairer", + "allmydata.mutable.retrieve", + "allmydata.mutable.servermap", "allmydata.node", "allmydata.storage_client", "allmydata.storage.common", @@ -155,5 +159,4 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_upload", "allmydata.test.test_uri", "allmydata.test.test_util", - "allmydata.test.test_version", ] diff --git a/src/allmydata/util/log.py b/src/allmydata/util/log.py index 11c78a5a2..509deb6a4 100644 --- a/src/allmydata/util/log.py +++ b/src/allmydata/util/log.py @@ -11,6 +11,7 @@ from __future__ import unicode_literals from future.utils import PY2 if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_str from pyutil import nummedobj @@ -55,6 +56,7 @@ class LogMixin(object): pmsgid = self._parentmsgid if pmsgid is None: pmsgid = self._grandparentmsgid + kwargs = {ensure_str(k): v for (k, v) in kwargs.items()} msgid = log.msg(msg, facility=facility, parent=pmsgid, *args, **kwargs) if self._parentmsgid is None: self._parentmsgid = msgid diff --git a/src/allmydata/version_checks.py b/src/allmydata/version_checks.py deleted file mode 100644 index d022055ea..000000000 --- a/src/allmydata/version_checks.py +++ /dev/null @@ -1,334 +0,0 @@ -""" -Produce reports about the versions of Python software in use by Tahoe-LAFS -for debugging and auditing purposes. - -Ported to Python 3. 
-""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -__all__ = [ - "PackagingError", - "get_package_versions", - "get_package_versions_string", - "normalized_version", -] - -import os, platform, re, sys, traceback, pkg_resources - -import six - -import distro - -from . import ( - __appname__, - full_version, - branch, -) -from .util import ( - verlib, -) - -if getattr(sys, 'frozen', None): - # "Frozen" python interpreters (i.e., standalone executables - # generated by PyInstaller and other, similar utilities) run - # independently of a traditional setuptools-based packaging - # environment, and so pkg_resources.get_distribution() cannot be - # used in such cases to gather a list of requirements at runtime - # (and because a frozen application is one that has already been - # "installed", an empty list suffices here). - _INSTALL_REQUIRES = [] -else: - _INSTALL_REQUIRES = list( - str(req) - for req - in pkg_resources.get_distribution(__appname__).requires() - ) - -class PackagingError(EnvironmentError): - """ - Raised when there is an error in packaging of Tahoe-LAFS or its - dependencies which makes it impossible to proceed safely. 
- """ - -def get_package_versions(): - return dict([(k, v) for k, (v, l, c) in _vers_and_locs_list]) - -def get_package_versions_string(show_paths=False, debug=False): - res = [] - for p, (v, loc, comment) in _vers_and_locs_list: - info = str(p) + ": " + str(v) - if comment: - info = info + " [%s]" % str(comment) - if show_paths: - info = info + " (%s)" % str(loc) - res.append(info) - - output = "\n".join(res) + "\n" - - if _cross_check_errors: - output += _get_error_string(_cross_check_errors, debug=debug) - - return output - -_distributor_id_cmdline_re = re.compile("(?:Distributor ID:)\s*(.*)", re.I) -_release_cmdline_re = re.compile("(?:Release:)\s*(.*)", re.I) - -_distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I) -_release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I) - -_distname = None -_version = None - -def normalized_version(verstr, what=None): - try: - suggested = verlib.suggest_normalized_version(verstr) or verstr - return verlib.NormalizedVersion(suggested) - except verlib.IrrationalVersionError: - raise - except Exception: - cls, value, trace = sys.exc_info() - new_exc = PackagingError("could not parse %s due to %s: %s" - % (what or repr(verstr), cls.__name__, value)) - six.reraise(cls, new_exc, trace) - -def _get_error_string(errors, debug=False): - - msg = "\n%s\n" % ("\n".join(errors),) - if debug: - msg += ( - "\n" - "For debugging purposes, the PYTHONPATH was\n" - " %r\n" - "install_requires was\n" - " %r\n" - "sys.path after importing pkg_resources was\n" - " %s\n" - % ( - os.environ.get('PYTHONPATH'), - _INSTALL_REQUIRES, - (os.pathsep+"\n ").join(sys.path), - ) - ) - return msg - -def _cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list): - """This function returns a list of errors due to any failed cross-checks.""" - - from ._auto_deps import not_import_versionable - - errors = [] - not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl'] - - for name, (imp_ver, imp_loc, 
imp_comment) in imported_vers_and_locs_list: - name = name.lower() - if name not in not_pkg_resourceable: - if name not in pkg_resources_vers_and_locs: - if name == "setuptools" and "distribute" in pkg_resources_vers_and_locs: - pr_ver, pr_loc = pkg_resources_vers_and_locs["distribute"] - if not (os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)) - and imp_comment == "distribute"): - errors.append("Warning: dependency 'setuptools' found to be version %r of 'distribute' from %r " - "by pkg_resources, but 'import setuptools' gave version %r [%s] from %r. " - "A version mismatch is expected, but a location mismatch is not." - % (pr_ver, pr_loc, imp_ver, imp_comment or 'probably *not* distribute', imp_loc)) - else: - errors.append("Warning: dependency %r (version %r imported from %r) was not found by pkg_resources." - % (name, imp_ver, imp_loc)) - continue - - pr_ver, pr_loc = pkg_resources_vers_and_locs[name] - if imp_ver is None and imp_loc is None: - errors.append("Warning: dependency %r could not be imported. pkg_resources thought it should be possible " - "to import version %r from %r.\nThe exception trace was %r." - % (name, pr_ver, pr_loc, imp_comment)) - continue - - # If the pkg_resources version is identical to the imported version, don't attempt - # to normalize them, since it is unnecessary and may fail (ticket #2499). - if imp_ver != 'unknown' and pr_ver == imp_ver: - continue - - try: - pr_normver = normalized_version(pr_ver) - except verlib.IrrationalVersionError: - continue - except Exception as e: - errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. " - "The version found by import was %r from %r. " - "pkg_resources thought it should be found at %r. 
" - "The exception was %s: %s" - % (pr_ver, name, imp_ver, imp_loc, pr_loc, e.__class__.__name__, e)) - else: - if imp_ver == 'unknown': - if name not in not_import_versionable: - errors.append("Warning: unexpectedly could not find a version number for dependency %r imported from %r. " - "pkg_resources thought it should be version %r at %r." - % (name, imp_loc, pr_ver, pr_loc)) - else: - try: - imp_normver = normalized_version(imp_ver) - except verlib.IrrationalVersionError: - continue - except Exception as e: - errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. " - "pkg_resources thought it should be version %r at %r. " - "The exception was %s: %s" - % (imp_ver, name, imp_loc, pr_ver, pr_loc, e.__class__.__name__, e)) - else: - if pr_ver == 'unknown' or (pr_normver != imp_normver): - if not os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)): - errors.append("Warning: dependency %r found to have version number %r (normalized to %r, from %r) " - "by pkg_resources, but version %r (normalized to %r, from %r) by import." 
- % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc)) - - return errors - -def _get_openssl_version(): - try: - from OpenSSL import SSL - return _extract_openssl_version(SSL) - except Exception: - return ("unknown", None, None) - -def _extract_openssl_version(ssl_module): - openssl_version = ssl_module.SSLeay_version(ssl_module.SSLEAY_VERSION) - if openssl_version.startswith('OpenSSL '): - openssl_version = openssl_version[8 :] - - (version, _, comment) = openssl_version.partition(' ') - - try: - openssl_cflags = ssl_module.SSLeay_version(ssl_module.SSLEAY_CFLAGS) - if '-DOPENSSL_NO_HEARTBEATS' in openssl_cflags.split(' '): - comment += ", no heartbeats" - except Exception: - pass - - return (version, None, comment if comment else None) - - -def _get_platform(): - # Our version of platform.platform(), telling us both less and more than the - # Python Standard Library's version does. - # We omit details such as the Linux kernel version number, but we add a - # more detailed and correct rendition of the Linux distribution and - # distribution-version. - if "linux" in platform.system().lower(): - return ( - platform.system() + "-" + - "_".join(distro.linux_distribution()[:2]) + "-" + - platform.machine() + "-" + - "_".join([x for x in platform.architecture() if x]) - ) - else: - return platform.platform() - -def _get_package_versions_and_locations(): - import warnings - from ._auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \ - runtime_warning_messages, warning_imports, ignorable - - def package_dir(srcfile): - return os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile)))) - - # pkg_resources.require returns the distribution that pkg_resources attempted to put - # on sys.path, which can differ from the one that we actually import due to #1258, - # or any other bug that causes sys.path to be set up incorrectly. 
Therefore we - # must import the packages in order to check their versions and paths. - - # This is to suppress all UserWarnings and various DeprecationWarnings and RuntimeWarnings - # (listed in _auto_deps.py). - - warnings.filterwarnings("ignore", category=UserWarning, append=True) - - for msg in global_deprecation_messages + deprecation_messages: - warnings.filterwarnings("ignore", category=DeprecationWarning, message=msg, append=True) - for msg in runtime_warning_messages: - warnings.filterwarnings("ignore", category=RuntimeWarning, message=msg, append=True) - try: - for modulename in warning_imports: - try: - __import__(modulename) - except (ImportError, SyntaxError): - pass - finally: - # Leave suppressions for UserWarnings and global_deprecation_messages active. - for _ in runtime_warning_messages + deprecation_messages: - warnings.filters.pop() - - packages = [] - pkg_resources_vers_and_locs = dict() - - if not hasattr(sys, 'frozen'): - pkg_resources_vers_and_locs = { - p.project_name.lower(): (str(p.version), p.location) - for p - in pkg_resources.require(_INSTALL_REQUIRES) - } - - def get_version(module): - if hasattr(module, '__version__'): - return str(getattr(module, '__version__')) - elif hasattr(module, 'version'): - ver = getattr(module, 'version') - if isinstance(ver, tuple): - return '.'.join(map(str, ver)) - else: - return str(ver) - else: - return 'unknown' - - for pkgname, modulename in [(__appname__, 'allmydata')] + package_imports: - if modulename: - try: - __import__(modulename) - module = sys.modules[modulename] - except (ImportError, SyntaxError): - etype, emsg, etrace = sys.exc_info() - trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1]) - packages.append( (pkgname, (None, None, trace_info)) ) - else: - comment = None - if pkgname == __appname__: - comment = "%s: %s" % (branch, full_version) - elif pkgname == 'setuptools' and hasattr(module, '_distribute'): - # distribute does not report its version in any module 
variables - comment = 'distribute' - ver = get_version(module) - loc = package_dir(module.__file__) - if ver == "unknown" and pkgname in pkg_resources_vers_and_locs: - (pr_ver, pr_loc) = pkg_resources_vers_and_locs[pkgname] - if loc == os.path.normcase(os.path.realpath(pr_loc)): - ver = pr_ver - packages.append( (pkgname, (ver, loc, comment)) ) - elif pkgname == 'python': - packages.append( (pkgname, (platform.python_version(), sys.executable, None)) ) - elif pkgname == 'platform': - packages.append( (pkgname, (_get_platform(), None, None)) ) - elif pkgname == 'OpenSSL': - packages.append( (pkgname, _get_openssl_version()) ) - - cross_check_errors = [] - - if len(pkg_resources_vers_and_locs) > 0: - imported_packages = set([p.lower() for (p, _) in packages]) - extra_packages = [] - - for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.items(): - if pr_name not in imported_packages and pr_name not in ignorable: - extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) ) - - cross_check_errors = _cross_check(pkg_resources_vers_and_locs, packages) - packages += extra_packages - - return packages, cross_check_errors - - -_vers_and_locs_list, _cross_check_errors = _get_package_versions_and_locations() diff --git a/src/allmydata/web/introweb.py b/src/allmydata/web/introweb.py index f57a5232a..42e353dc1 100644 --- a/src/allmydata/web/introweb.py +++ b/src/allmydata/web/introweb.py @@ -6,7 +6,6 @@ from twisted.python.filepath import FilePath from twisted.web import static import allmydata import json -from allmydata.version_checks import get_package_versions_string from allmydata.util import idlib from allmydata.web.common import ( render_time, @@ -89,7 +88,7 @@ class IntroducerRootElement(Element): self.introducer_service = introducer_service self.node_data_dict = { "my_nodeid": idlib.nodeid_b2a(self.introducer_node.nodeid), - "version": get_package_versions_string(), + "version": allmydata.__full_version__, "import_path": 
str(allmydata).replace("/", "/ "), # XXX kludge for wrapping "rendered_at": render_time(time.time()), } diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index 91f14bd91..cb5ddc070 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -21,7 +21,6 @@ from twisted.web.template import ( ) import allmydata # to display import path -from allmydata.version_checks import get_package_versions_string from allmydata.util import log from allmydata.interfaces import IFileNode from allmydata.web import ( @@ -566,7 +565,7 @@ class RootElement(Element): @renderer def version(self, req, tag): - return tag(get_package_versions_string()) + return tag(allmydata.__full_version__) @renderer def import_path(self, req, tag): diff --git a/src/allmydata/webish.py b/src/allmydata/webish.py index f94d6f7da..b5e310fbc 100644 --- a/src/allmydata/webish.py +++ b/src/allmydata/webish.py @@ -1,13 +1,13 @@ from six import ensure_str -import re, time +import re, time, tempfile -from functools import ( - partial, -) from cgi import ( FieldStorage, ) +from io import ( + BytesIO, +) from twisted.application import service, strports, internet from twisted.web import static @@ -150,17 +150,34 @@ def _logFormatter(logDateTime, request): ) -tahoe_lafs_site = partial( - Site, - requestFactory=TahoeLAFSRequest, - logFormatter=_logFormatter, -) +class TahoeLAFSSite(Site, object): + """ + The HTTP protocol factory used by Tahoe-LAFS. + + Among the behaviors provided: + + * A configurable temporary directory where large request bodies can be + written so they don't stay in memory. + + * A log formatter that writes some access logs but omits capability + strings to help keep them secret. 
+ """ + requestFactory = TahoeLAFSRequest + + def __init__(self, tempdir, *args, **kwargs): + Site.__init__(self, *args, logFormatter=_logFormatter, **kwargs) + self._tempdir = tempdir + + def getContentFile(self, length): + if length is None or length >= 1024 * 1024: + return tempfile.TemporaryFile(dir=self._tempdir) + return BytesIO() class WebishServer(service.MultiService): name = "webish" - def __init__(self, client, webport, nodeurl_path=None, staticdir=None, + def __init__(self, client, webport, tempdir, nodeurl_path=None, staticdir=None, clock=None, now_fn=time.time): service.MultiService.__init__(self) # the 'data' argument to all render() methods default to the Client @@ -170,7 +187,7 @@ class WebishServer(service.MultiService): # time in a deterministic manner. self.root = root.Root(client, clock, now_fn) - self.buildServer(webport, nodeurl_path, staticdir) + self.buildServer(webport, tempdir, nodeurl_path, staticdir) # If set, clock is a twisted.internet.task.Clock that the tests # use to test ophandle expiration. @@ -180,9 +197,9 @@ class WebishServer(service.MultiService): self.root.putChild(b"storage-plugins", StoragePlugins(client)) - def buildServer(self, webport, nodeurl_path, staticdir): + def buildServer(self, webport, tempdir, nodeurl_path, staticdir): self.webport = webport - self.site = tahoe_lafs_site(self.root) + self.site = TahoeLAFSSite(tempdir, self.root) self.staticdir = staticdir # so tests can check if staticdir: self.root.putChild("static", static.File(staticdir)) @@ -260,4 +277,4 @@ class IntroducerWebishServer(WebishServer): def __init__(self, introducer, webport, nodeurl_path=None, staticdir=None): service.MultiService.__init__(self) self.root = introweb.IntroducerRoot(introducer) - self.buildServer(webport, nodeurl_path, staticdir) + self.buildServer(webport, tempfile.tempdir, nodeurl_path, staticdir)