mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-01-12 16:02:43 +00:00)
Merge remote-tracking branch 'origin/master' into 3367.port-uri-to-python3
This commit is contained in:
commit cdcd28b42f
@@ -281,6 +281,10 @@ jobs:

      environment:
        <<: *UTF_8_ENVIRONMENT
        # The default trial args include --rterrors which is incompatible with
        # this reporter on Python 3. So drop that and just specify the
        # reporter.
        TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
        TAHOE_LAFS_TOX_ENVIRONMENT: "py36"

@@ -65,7 +65,7 @@ TIMEOUT="timeout --kill-after 1m 15m"
# Send the output directly to a file because transporting the binary subunit2
# via tox and then scraping it out is hideous and failure prone.
export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
export TAHOE_LAFS_TRIAL_ARGS="--reporter=subunitv2-file --rterrors"
export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
export PIP_NO_INDEX="1"

if [ "${ALLOWED_FAILURE}" = "yes" ]; then
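The replacement export uses POSIX default-value expansion, so a TAHOE_LAFS_TRIAL_ARGS already present in the environment (such as the py36 job's setting in the config.yml hunk above) now takes precedence over the script's default. A minimal sketch of the expansion semantics, not part of the diff itself:

    unset TAHOE_LAFS_TRIAL_ARGS
    echo "${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"  # prints the default
    TAHOE_LAFS_TRIAL_ARGS="--reporter=subunitv2-file"   # e.g. as set by the py36 CI job
    echo "${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"  # prints the job's value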
@@ -88,9 +88,5 @@ if [ -n "${ARTIFACTS}" ]; then

    # Create a junitxml results area.
    mkdir -p "$(dirname "${JUNITXML}")"
    # Always succeed even if subunit2junitxml fails. subunit2junitxml signals
    # failure if the stream it is processing contains test failures. This is
    # not what we care about. If we cared about it, the test command above
    # would have signalled failure already and we wouldn't be here.
    "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || true
    "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
fi

@@ -1,218 +0,0 @@
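The 218 deleted lines that follow are evidently the `ratchet-passing` tracking file consumed by ratchet.py and ratchet.sh (both also deleted below): one dotted test-case name per line, each a test that was required to keep passing on Python 3.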
allmydata.test.mutable.test_datahandle.DataHandle.test_datahandle_get_size
allmydata.test.mutable.test_datahandle.DataHandle.test_datahandle_get_size_out_of_order
allmydata.test.mutable.test_exceptions.Exceptions.test_repr
allmydata.test.mutable.test_filehandle.FileHandle.test_close
allmydata.test.mutable.test_filehandle.FileHandle.test_filehandle_file
allmydata.test.mutable.test_filehandle.FileHandle.test_filehandle_get_size
allmydata.test.mutable.test_filehandle.FileHandle.test_filehandle_get_size_out_of_order
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_1s
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_25s
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_day
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_future_5_minutes
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_hours
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_month
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_year
allmydata.test.test_abbreviate.Abbreviate.test_parse_space
allmydata.test.test_abbreviate.Abbreviate.test_space
allmydata.test.test_abbreviate.Abbreviate.test_time
allmydata.test.test_backupdb.BackupDB.test_basic
allmydata.test.test_backupdb.BackupDB.test_upgrade_v1_v2
allmydata.test.test_backupdb.BackupDB.test_wrong_version
allmydata.test.test_base32.Base32.test_a2b
allmydata.test.test_base32.Base32.test_a2b_b2a_match_Pythons
allmydata.test.test_base32.Base32.test_b2a
allmydata.test.test_base32.Base32.test_b2a_or_none
allmydata.test.test_base62.Base62.test_ende_0x00
allmydata.test.test_base62.Base62.test_ende_0x000000
allmydata.test.test_base62.Base62.test_ende_0x01
allmydata.test.test_base62.Base62.test_ende_0x0100
allmydata.test.test_base62.Base62.test_ende_0x010000
allmydata.test.test_base62.Base62.test_ende_longrandstr
allmydata.test.test_base62.Base62.test_ende_randstr
allmydata.test.test_base62.Base62.test_known_values
allmydata.test.test_base62.Base62.test_num_octets_that_encode_to_this_many_chars
allmydata.test.test_base62.Base62.test_odd_sizes
allmydata.test.test_base62.Base62.test_roundtrip
allmydata.test.test_crypto.TestEd25519.test_deserialize_private_not_bytes
allmydata.test.test_crypto.TestEd25519.test_deserialize_public_not_bytes
allmydata.test.test_crypto.TestEd25519.test_key_serialization
allmydata.test.test_crypto.TestEd25519.test_sign_invalid_pubkey
allmydata.test.test_crypto.TestEd25519.test_signature_data_not_bytes
allmydata.test.test_crypto.TestEd25519.test_signature_not_bytes
allmydata.test.test_crypto.TestEd25519.test_signed_data_not_bytes
allmydata.test.test_crypto.TestEd25519.test_verify_invalid_pubkey
allmydata.test.test_crypto.TestRegression.test_aes_no_iv_process_long_input
allmydata.test.test_crypto.TestRegression.test_aes_no_iv_process_short_input
allmydata.test.test_crypto.TestRegression.test_aes_with_iv_process_long_input
allmydata.test.test_crypto.TestRegression.test_aes_with_iv_process_short_input
allmydata.test.test_crypto.TestRegression.test_decode_ed15519_keypair
allmydata.test.test_crypto.TestRegression.test_decode_rsa_keypair
allmydata.test.test_crypto.TestRegression.test_encrypt_data_not_bytes
allmydata.test.test_crypto.TestRegression.test_incorrect_iv_size
allmydata.test.test_crypto.TestRegression.test_iv_not_bytes
allmydata.test.test_crypto.TestRegression.test_key_incorrect_size
allmydata.test.test_crypto.TestRegression.test_old_start_up_test
allmydata.test.test_crypto.TestRsa.test_keys
allmydata.test.test_crypto.TestRsa.test_sign_invalid_pubkey
allmydata.test.test_crypto.TestRsa.test_verify_invalid_pubkey
allmydata.test.test_crypto.TestUtil.test_remove_prefix_bad
allmydata.test.test_crypto.TestUtil.test_remove_prefix_entire_string
allmydata.test.test_crypto.TestUtil.test_remove_prefix_good
allmydata.test.test_crypto.TestUtil.test_remove_prefix_partial
allmydata.test.test_crypto.TestUtil.test_remove_prefix_zero
allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_dictutil.DictUtil.test_auxdict
allmydata.test.test_dictutil.DictUtil.test_dict_of_sets
allmydata.test.test_happiness.Happiness.test_100
allmydata.test.test_happiness.Happiness.test_calc_happy
allmydata.test.test_happiness.Happiness.test_everything_broken
allmydata.test.test_happiness.Happiness.test_hypothesis0
allmydata.test.test_happiness.Happiness.test_hypothesis_0
allmydata.test.test_happiness.Happiness.test_hypothesis_1
allmydata.test.test_happiness.Happiness.test_placement_1
allmydata.test.test_happiness.Happiness.test_placement_simple
allmydata.test.test_happiness.Happiness.test_redistribute
allmydata.test.test_happiness.Happiness.test_unhappy
allmydata.test.test_happiness.HappinessUtils.test_residual_0
allmydata.test.test_happiness.HappinessUtils.test_trivial_flow_graph
allmydata.test.test_happiness.HappinessUtils.test_trivial_maximum_graph
allmydata.test.test_happiness.PlacementTests.test_hypothesis_unhappy
allmydata.test.test_happiness.PlacementTests.test_more_hypothesis
allmydata.test.test_hashtree.Complete.test_create
allmydata.test.test_hashtree.Complete.test_dump
allmydata.test.test_hashtree.Complete.test_needed_hashes
allmydata.test.test_hashtree.Incomplete.test_check
allmydata.test.test_hashtree.Incomplete.test_create
allmydata.test.test_hashtree.Incomplete.test_depth_of
allmydata.test.test_hashtree.Incomplete.test_large
allmydata.test.test_hashtree.Incomplete.test_needed_hashes
allmydata.test.test_hashutil.HashUtilTests.test_chk
allmydata.test.test_hashutil.HashUtilTests.test_hashers
allmydata.test.test_hashutil.HashUtilTests.test_known_answers
allmydata.test.test_hashutil.HashUtilTests.test_random_key
allmydata.test.test_hashutil.HashUtilTests.test_sha256d
allmydata.test.test_hashutil.HashUtilTests.test_sha256d_truncated
allmydata.test.test_hashutil.HashUtilTests.test_timing_safe_compare
allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_iputil.GcUtil.test_gc_after_allocations
allmydata.test.test_iputil.GcUtil.test_release_delays_gc
allmydata.test.test_iputil.ListAddresses.test_get_local_ip_for
allmydata.test.test_iputil.ListAddresses.test_list_async
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_cygwin
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_ifconfig
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_ip_addr
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_route
allmydata.test.test_iputil.ListenOnUsed.test_random_port
allmydata.test.test_iputil.ListenOnUsed.test_specific_port
allmydata.test.test_log.Log.test_default_facility
allmydata.test.test_log.Log.test_err
allmydata.test.test_log.Log.test_grandparent_id
allmydata.test.test_log.Log.test_no_prefix
allmydata.test.test_log.Log.test_numming
allmydata.test.test_log.Log.test_parent_id
allmydata.test.test_log.Log.test_with_bytes_prefix
allmydata.test.test_log.Log.test_with_prefix
allmydata.test.test_netstring.Netstring.test_encode
allmydata.test.test_netstring.Netstring.test_extra
allmydata.test.test_netstring.Netstring.test_nested
allmydata.test.test_netstring.Netstring.test_split
allmydata.test.test_observer.Observer.test_lazy_oneshot
allmydata.test.test_observer.Observer.test_observerlist
allmydata.test.test_observer.Observer.test_oneshot
allmydata.test.test_observer.Observer.test_oneshot_fireagain
allmydata.test.test_pipeline.Pipeline.test_basic
allmydata.test.test_pipeline.Pipeline.test_errors
allmydata.test.test_pipeline.Pipeline.test_errors2
allmydata.test.test_python3.Python3PortingEffortTests.test_finished_porting
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_distinct
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_exist
allmydata.test.test_spans.ByteSpans.test_basic
allmydata.test.test_spans.ByteSpans.test_large
allmydata.test.test_spans.ByteSpans.test_math
allmydata.test.test_spans.ByteSpans.test_overlap
allmydata.test.test_spans.ByteSpans.test_random
allmydata.test.test_spans.StringSpans.test_basic
allmydata.test.test_spans.StringSpans.test_random
allmydata.test.test_spans.StringSpans.test_test
allmydata.test.test_statistics.Statistics.test_binomial_coeff
allmydata.test.test_statistics.Statistics.test_binomial_distribution_pmf
allmydata.test.test_statistics.Statistics.test_convolve
allmydata.test.test_statistics.Statistics.test_find_k
allmydata.test.test_statistics.Statistics.test_pr_backup_file_loss
allmydata.test.test_statistics.Statistics.test_pr_file_loss
allmydata.test.test_statistics.Statistics.test_repair_cost
allmydata.test.test_statistics.Statistics.test_repair_count_pmf
allmydata.test.test_statistics.Statistics.test_survival_pmf
allmydata.test.test_stats.CPUUsage.test_monitor
allmydata.test.test_testing.FakeWebTest.test_download_no_arg
allmydata.test.test_time_format.TimeFormat.test_epoch
allmydata.test.test_time_format.TimeFormat.test_epoch_in_London
allmydata.test.test_time_format.TimeFormat.test_format_delta
allmydata.test.test_time_format.TimeFormat.test_format_time
allmydata.test.test_time_format.TimeFormat.test_format_time_y2038
allmydata.test.test_time_format.TimeFormat.test_iso_utc
allmydata.test.test_time_format.TimeFormat.test_parse_date
allmydata.test.test_time_format.TimeFormat.test_parse_duration
allmydata.test.test_uri.CHKFile.test_pack
allmydata.test.test_uri.CHKFile.test_pack_badly
allmydata.test.test_uri.Compare.test_compare
allmydata.test.test_uri.Compare.test_has_uri_prefix
allmydata.test.test_uri.Compare.test_is_literal_file_uri
allmydata.test.test_uri.Compare.test_is_uri
allmydata.test.test_uri.Constraint.test_constraint
allmydata.test.test_uri.Dirnode.test_immutable
allmydata.test.test_uri.Dirnode.test_literal
allmydata.test.test_uri.Dirnode.test_mdmf
allmydata.test.test_uri.Dirnode.test_mdmf_attenuation
allmydata.test.test_uri.Dirnode.test_mdmf_verifier
allmydata.test.test_uri.Dirnode.test_pack
allmydata.test.test_uri.Extension.test_pack
allmydata.test.test_uri.Literal.test_empty
allmydata.test.test_uri.Literal.test_nonascii
allmydata.test.test_uri.Literal.test_pack
allmydata.test.test_uri.Mutable.test_create_readonly_mdmf_cap_from_verifycap
allmydata.test.test_uri.Mutable.test_create_writeable_mdmf_cap_from_readcap
allmydata.test.test_uri.Mutable.test_create_writeable_mdmf_cap_from_verifycap
allmydata.test.test_uri.Mutable.test_mdmf_cap_ignore_extensions
allmydata.test.test_uri.Mutable.test_mdmf_from_string
allmydata.test.test_uri.Mutable.test_mdmf_verifier_cap
allmydata.test.test_uri.Mutable.test_pack
allmydata.test.test_uri.Mutable.test_readonly_mdmf_cap
allmydata.test.test_uri.Mutable.test_writeable_mdmf_cap
allmydata.test.test_uri.Unknown.test_from_future
allmydata.test.test_util.FileUtil.test_abspath_expanduser_unicode
allmydata.test.test_util.FileUtil.test_create_long_path
allmydata.test.test_util.FileUtil.test_disk_stats
allmydata.test.test_util.FileUtil.test_disk_stats_avail_nonnegative
allmydata.test.test_util.FileUtil.test_du
allmydata.test.test_util.FileUtil.test_encrypted_tempfile
allmydata.test.test_util.FileUtil.test_get_pathinfo
allmydata.test.test_util.FileUtil.test_get_pathinfo_symlink
allmydata.test.test_util.FileUtil.test_make_dirs_with_absolute_mode
allmydata.test.test_util.FileUtil.test_remove_if_possible
allmydata.test.test_util.FileUtil.test_rename
allmydata.test.test_util.FileUtil.test_rename_no_overwrite
allmydata.test.test_util.FileUtil.test_replace_file
allmydata.test.test_util.FileUtil.test_rm_dir
allmydata.test.test_util.FileUtil.test_windows_expanduser_win7
allmydata.test.test_util.FileUtil.test_windows_expanduser_xp
allmydata.test.test_util.FileUtil.test_write_atomically
allmydata.test.test_util.IDLib.test_nodeid_b2a
allmydata.test.test_util.Math.test_round_sigfigs
allmydata.test.test_util.PollMixinTests.test_PollMixin_False_then_True
allmydata.test.test_util.PollMixinTests.test_PollMixin_True
allmydata.test.test_util.PollMixinTests.test_timeout
allmydata.test.test_util.YAML.test_convert
allmydata.test.test_version.CheckRequirement.test_cross_check
allmydata.test.test_version.CheckRequirement.test_cross_check_unparseable_versions
allmydata.test.test_version.CheckRequirement.test_extract_openssl_version
allmydata.test.test_version.CheckRequirement.test_packages_from_pkg_resources
allmydata.test.test_version.T.test_report_import_error
allmydata.test.test_version.VersionTestCase.test_basic_versions
allmydata.test.test_version.VersionTestCase.test_comparison
allmydata.test.test_version.VersionTestCase.test_from_parts
allmydata.test.test_version.VersionTestCase.test_irrational_versions
allmydata.test.test_version.VersionTestCase.test_suggest_normalized_version
@@ -1,409 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Ratchet up passing tests, or ratchet down failing tests.

Usage:

    ratchet.py <"up" or "down"> <junitxml file path> <tracking file path>

This script helps when you expect a large test suite to fail spectacularly in
some environment, and you want to gradually improve the situation with minimal
impact to forward development of the same codebase for other environments. The
initial and primary use case is porting from Python 2 to Python 3.

The idea is to emit JUnit XML from your test runner, and then invoke ratchet.py
to consume this XML output and operate on a so-called "tracking" file. When
ratcheting up passing tests, the tracking file will contain a list of tests,
one per line, that passed. When ratcheting down, the tracking file contains a
list of failing tests. On each subsequent run, ratchet.py will compare the
prior results in the tracking file with the new results in the XML, and will
report on both welcome and unwelcome changes. It will modify the tracking file
in the case of welcome changes, and therein lies the ratcheting.

The exit codes are:

  0 - no changes observed
  1 - changes observed, whether welcome or unwelcome
  2 - invocation error

If <junitxml file path> does not exist, you'll get a FileNotFoundError:

>>> _test('up', None, None)  # doctest: +ELLIPSIS
Traceback (most recent call last):
  ...
FileNotFoundError: ...

If <tracking file path> does not exist, that's fine:

>>> _test('up', '1', None)
Some tests not required to pass did:
  c0.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 0 test(s) were required to pass, but instead 1 did. 🐭

Same if you're ratcheting down:

>>> _test('down', '1', None)
All and only tests expected to fail did. 💃

If the test run has the same output as last time, it's all good:

>>> _test('up', '01001110', '01001110')
All and only tests required to pass did. 💃

>>> _test('down', '01001110', '10110001')
All and only tests expected to fail did. 💃

If there's a welcome change, that's noted:

>>> _test('up', '0101', '0100')
Some tests not required to pass did:
  c3.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 1 test(s) were required to pass, but instead 2 did. 🐭

>>> _test('down', '0011', '1110')
Some tests expected to fail didn't:
  c2.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 3 test(s) were expected to fail, but instead 2 did. 🐭

And if there is an unwelcome change, that is noted as well:

>>> _test('up', '1101', '1111')
Some tests required to pass didn't:
  c2.t
Eep! 4 test(s) were required to pass, but instead 3 did. 🐭

>>> _test('down', '0000', '1101')
Some tests not expected to fail did:
  c2.t
Eep! 3 test(s) were expected to fail, but instead 4 did. 🐭

And if there are both welcome and unwelcome changes, they are both noted:

>>> _test('up', '1101', '1011')
Some tests not required to pass did:
  c1.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Some tests required to pass didn't:
  c2.t
Eep! 3 test(s) were required to pass, but instead 3 did. 🐭

>>> _test('down', '0100', '1100')
Some tests not expected to fail did:
  c2.t
  c3.t
Some tests expected to fail didn't:
  c1.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 2 test(s) were expected to fail, but instead 3 did. 🐭


To test ratchet.py itself:

    python3 -m doctest ratchet.py

'''
from __future__ import absolute_import, division, print_function, unicode_literals

import io
import os
import re
import sys
import tempfile
import xml.etree.ElementTree as Etree


class JUnitXMLFile(object):
    '''Represent a file containing test results in JUnit XML format.

    >>> eg = _mktemp_junitxml('0100111')
    >>> results = JUnitXMLFile(eg.name).parse()
    >>> results.failed
    ['c0.t', 'c2.t', 'c3.t']
    >>> results.passed
    ['c1.t', 'c4.t', 'c5.t', 'c6.t']

    '''

    def __init__(self, filepath):
        self.filepath = filepath
        self.failed = []
        self.failed_aggregates = {}
        self.stderr_output = []
        self.passed = []
        self._tree = None

    def parse(self):
        if self._tree:
            raise RuntimeError('already parsed')
        self._tree = Etree.parse(self.filepath)
        for testcase in self._tree.findall('testcase'):
            self.process_testcase(testcase)
        return self

    def process_testcase(self, case):
        key = self.case_key(case)

        # look at children but throw away stderr output
        nonpassing = [c for c in case if not c.tag == 'system-err']
        n = len(nonpassing)
        if n > 1:
            raise RuntimeError(f'multiple results for {key}: {nonpassing}')
        elif n == 1:
            result = nonpassing.pop()
            self.failed.append(key)
            message = result.get('message')
            self.failed_aggregates.setdefault(message, []).append(key)
        else:
            self.passed.append(key)

    @staticmethod
    def case_key(case):
        return f'{case.get("classname")}.{case.get("name")}'

    def report(self, details=False):
        for k, v in sorted(
                self.failed_aggregates.items(),
                key=lambda i: len(i[1]),
                reverse=True):
            print(f'# {k}')
            for t in v:
                print(f' - {t}')


def load_previous_results(txt):
    try:
        previous_results = open(txt).read()
    except FileNotFoundError:
        previous_results = ''
    parsed = set()
    for line in previous_results.splitlines():
        if not line or line.startswith('#'):
            continue
        parsed.add(line)
    return parsed


def print_tests(tests):
    for test in sorted(tests):
        print(' ', test)


def ratchet_up_passing(tracking_path, tests):
    try:
        old = set(open(tracking_path, 'r'))
    except FileNotFoundError:
        old = set()
    new = set(t + '\n' for t in tests)
    merged = sorted(old | new)
    open(tracking_path, 'w+').writelines(merged)


def ratchet_down_failing(tracking_path, tests):
    new = set(t + '\n' for t in tests)
    open(tracking_path, 'w+').writelines(sorted(new))


def main(direction, junitxml_path, tracking_path):
    '''Takes a string indicating which direction to ratchet, "up" or "down,"
    and two paths, one to test-runner output in JUnit XML format, the other to
    a file tracking test results (one test case dotted name per line). Walk the
    former looking for the latter, and react appropriately.

    >>> inp = _mktemp_junitxml('0100111')
    >>> out = _mktemp_tracking('0000000')
    >>> _test_main('up', inp.name, out.name)
    Some tests not required to pass did:
      c1.t
      c4.t
      c5.t
      c6.t
    Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
    Eep! 0 test(s) were required to pass, but instead 4 did. 🐭

    '''

    results = JUnitXMLFile(junitxml_path).parse()

    if tracking_path == '...':
        # Shortcut to aid in debugging XML parsing issues.
        results.report()
        return

    previous = load_previous_results(tracking_path)
    current = set(results.passed if direction == 'up' else results.failed)

    subjunctive = {'up': 'required to pass', 'down': 'expected to fail'}[direction]
    ratchet = None

    too_many = current - previous
    if too_many:
        print(f'Some tests not {subjunctive} did:')
        print_tests(too_many)
        if direction == 'up':
            # Too many passing tests is good -- let's do more of those!
            ratchet_up_passing(tracking_path, current)
            print(f'Conveniently, they have been added to `{tracking_path}` for you. Perhaps commit that?')

    not_enough = previous - current
    if not_enough:
        print(f'Some tests {subjunctive} didn\'t:')
        print_tests(not_enough)
        if direction == 'down':
            # Not enough failing tests is good -- let's do more of those!
            ratchet_down_failing(tracking_path, current)
            print(f'Conveniently, they have been removed from `{tracking_path}` for you. Perhaps commit that?')

    if too_many or not_enough:
        print(f'Eep! {len(previous)} test(s) were {subjunctive}, but instead {len(current)} did. 🐭')
        return 1

    print(f'All and only tests {subjunctive} did. 💃')
    return 0


# When called as an executable ...

if __name__ == '__main__':
    try:
        direction, junitxml_path, tracking_path = sys.argv[1:4]
        if direction not in ('up', 'down'):
            raise ValueError
    except ValueError:
        doc = '\n'.join(__doc__.splitlines()[:6])
        doc = re.sub(' ratchet.py', f' {sys.argv[0]}', doc)
        print(doc, file=sys.stderr)
        exit_code = 2
    else:
        exit_code = main(direction, junitxml_path, tracking_path)
    sys.exit(exit_code)


# Helpers for when called under doctest ...

def _test(*a):
    return _test_main(*_mk(*a))


def _test_main(direction, junitxml, tracking):
    '''Takes a string 'up' or 'down' and paths to (or open file objects for)
    the JUnit XML and tracking files to use for this test run. Captures and
    emits stdout (slightly modified) for inspection via doctest.'''
    junitxml_path = junitxml.name if hasattr(junitxml, 'name') else junitxml
    tracking_path = tracking.name if hasattr(tracking, 'name') else tracking

    old_stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        main(direction, junitxml_path, tracking_path)
    finally:
        sys.stdout.seek(0)
        out = sys.stdout.read()
        out = re.sub('`.*?`', '`<tracking_path>`', out).strip()
        sys.stdout = old_stdout
    print(out)


class _PotentialFile(object):
    '''Represent a file that we are able to create but which doesn't exist yet,
    and which, if we create it, will be automatically torn down when the test
    run is over.'''

    def __init__(self, filename):
        self.d = tempfile.TemporaryDirectory()
        self.name = os.path.join(self.d.name, filename)


def _mk(direction, spec_junitxml, spec_tracking):
    '''Takes a string 'up' or 'down' and two bit strings specifying the state
    of the JUnit XML results file and the tracking file to set up for this test
    case. Returns the direction (unharmed) and two file-ish objects.

    If a spec string is None the corresponding return value will be a
    _PotentialFile object, which has a .name attribute (like a true file
    object) that points to a file that does not exist, but could.

    The reason not to simply return the path in all cases is that the file
    objects are actually temporary file objects that destroy the underlying
    file when they go out of scope, and we want to keep the underlying file
    around until the end of the test run.'''

    if None not in (spec_junitxml, spec_tracking):
        if len(spec_junitxml) != len(spec_tracking):
            raise ValueError(f'if both given, must be the same length: `{spec_junitxml}` and `{spec_tracking}`')
    if spec_junitxml is None:
        junitxml_fp = _PotentialFile('results.xml')
    else:
        junitxml_fp = _mktemp_junitxml(spec_junitxml)
    if spec_tracking is None:
        tracking_fp = _PotentialFile('tracking')
    else:
        tracking_fp = _mktemp_tracking(spec_tracking)
    return direction, junitxml_fp, tracking_fp


def _mktemp_junitxml(spec):
    '''Test helper to generate a raw JUnit XML file.

    >>> fp = _mktemp_junitxml('00101')
    >>> open(fp.name).read()[:11]
    '<testsuite>'

    '''
    fp = tempfile.NamedTemporaryFile()
    fp.write(b'<testsuite>')

    passed = '''\
<testcase classname="c{i}" name="t"></testcase>
'''
    failed = '''\
<testcase classname="c{i}" name="t">
<failure>Traceback (most recent call last):
File "/foo/bar/baz/buz.py", line 1, in <module>
NameError: name 'heck' is not defined
</failure>
</testcase>
'''

    i = 0
    for c in spec:
        if c == '0':
            out = failed
        elif c == '1':
            out = passed
        else:
            raise ValueError(f'bad c: `{c}`')
        fp.write(out.format(i=i).encode('utf8'))
        i += 1

    fp.write(b'</testsuite>')
    fp.flush()
    return fp


def _mktemp_tracking(spec):
    '''Test helper to prefabricate a tracking file.

    >>> fp = _mktemp_tracking('01101')
    >>> print(open(fp.name).read()[:-1])
    c1.t
    c2.t
    c4.t

    '''
    fp = tempfile.NamedTemporaryFile()

    i = 0
    for c in spec:
        if c == '0':
            pass
        elif c == '1':
            fp.write(f'c{i}.t\n'.encode('utf8'))
        else:
            raise ValueError(f'bad c: `{c}`')
        i += 1

    fp.flush()
    return fp
@@ -1,45 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
tracking_filename="ratchet-passing"

# Start somewhere predictable.
cd "$(dirname $0)"
base=$(pwd)

# Actually, though, trial outputs some things that are only gitignored in the project root.
cd "../.."

# Since both of the next calls are expected to exit non-0, relax our guard.
set +e
trial --reporter=subunitv2-file allmydata
subunit2junitxml < "${SUBUNITREPORTER_OUTPUT_PATH}" > "$base/results.xml"
set -e

# Okay, now we're clear.
cd "$base"

# Make sure ratchet.py itself is clean.
python3 -m doctest ratchet.py

# Now see about Tahoe-LAFS (also expected to fail) ...
set +e
python3 ratchet.py up results.xml "$tracking_filename"
code=$?
set -e

# Emit a diff of the tracking file, to aid in the situation where changes are
# not discovered until CI (where TERM might be `dumb`).
if [ "${TERM}" = 'dumb' ]; then
    export TERM=ansi
fi

echo "The ${tracking_filename} diff is:"
echo "================================="
# "git diff" gets pretty confused in this execution context when trying to
# write to stdout. Somehow it fails with SIGTTOU.
git diff -- "${tracking_filename}" > tracking.diff
cat tracking.diff
echo "================================="

echo "Exiting with code ${code} from ratchet.py."
exit ${code}
1 newsfragments/3316.minor Normal file
@@ -0,0 +1 @@
Port checker result pages' rendering from nevow to twisted web templates.
0 newsfragments/3373.minor Normal file
0 newsfragments/3376.minor Normal file
0 newsfragments/3380.minor Normal file
@@ -1,5 +1,5 @@
"""Directory Node implementation."""
import time, unicodedata
import time

from zope.interface import implementer
from twisted.internet import defer
@@ -18,7 +18,7 @@ from allmydata.check_results import DeepCheckResults, \
     DeepCheckAndRepairResults
from allmydata.monitor import Monitor
from allmydata.util import hashutil, base32, log
from allmydata.util.encodingutil import quote_output
from allmydata.util.encodingutil import quote_output, normalize
from allmydata.util.assertutil import precondition
from allmydata.util.netstring import netstring, split_netstring
from allmydata.util.consumer import download_to_data
@@ -101,12 +101,6 @@ def update_metadata(metadata, new_metadata, now):
    return metadata


# 'x' at the end of a variable name indicates that it holds a Unicode string that may not
# be NFC-normalized.

def normalize(namex):
    return unicodedata.normalize('NFC', namex)

# TODO: {Deleter,MetadataSetter,Adder}.modify all start by unpacking the
# contents and end by repacking them. It might be better to apply them to
# the unpacked contents.

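Net effect of these dirnode.py hunks: the module-local NFC-normalization helper (and the `unicodedata` import that served only it) is dropped in favor of the shared `normalize` now imported from allmydata.util.encodingutil.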
@@ -12,12 +12,18 @@ from __future__ import print_function
from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
    from past.builtins import unicode

import os
import time
import signal

from twisted.internet import reactor
from twisted.internet import defer, reactor
from twisted.python import failure
from twisted.trial import unittest

from ..util.assertutil import precondition
from ..util.encodingutil import unicode_platform, get_filesystem_encoding


class TimezoneMixin(object):
@@ -65,3 +71,44 @@ class SignalMixin(object):
        if self.sigchldHandler:
            signal.signal(signal.SIGCHLD, self.sigchldHandler)
        return super(SignalMixin, self).tearDown()


class ShouldFailMixin(object):

    def shouldFail(self, expected_failure, which, substring,
                   callable, *args, **kwargs):
        assert substring is None or isinstance(substring, (bytes, unicode))
        d = defer.maybeDeferred(callable, *args, **kwargs)
        def done(res):
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                if substring:
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                # return the Failure for further analysis, but in a form that
                # doesn't make the Deferred chain think that we failed.
                return [res]
            else:
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
        d.addBoth(done)
        return d


class ReallyEqualMixin(object):
    def failUnlessReallyEqual(self, a, b, msg=None):
        self.assertEqual(a, b, msg)
        self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))


def skip_if_cannot_represent_filename(u):
    precondition(isinstance(u, unicode))

    enc = get_filesystem_encoding()
    if not unicode_platform():
        try:
            u.encode(enc)
        except UnicodeEncodeError:
            raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")

@@ -7,27 +7,19 @@ from random import randrange
from six.moves import StringIO

from twisted.internet import reactor, defer
from twisted.python import failure
from twisted.trial import unittest

from ..util.assertutil import precondition
from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding,
                                         get_io_encoding)
from allmydata.util.encodingutil import get_io_encoding
from future.utils import PY2
if PY2:  # XXX this is a hack that makes some tests pass on Python3, remove
         # in the future
    from ..scripts import runner
from .common_py3 import SignalMixin
# Imported for backwards compatibility:
from .common_py3 import (
    SignalMixin, skip_if_cannot_represent_filename, ReallyEqualMixin, ShouldFailMixin
)

def skip_if_cannot_represent_filename(u):
    precondition(isinstance(u, unicode))

    enc = get_filesystem_encoding()
    if not unicode_platform():
        try:
            u.encode(enc)
        except UnicodeEncodeError:
            raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")

def skip_if_cannot_represent_argv(u):
    precondition(isinstance(u, unicode))
@@ -88,40 +80,12 @@ def flip_one_bit(s, offset=0, size=None):
    return result


class ReallyEqualMixin(object):
    def failUnlessReallyEqual(self, a, b, msg=None):
        self.assertEqual(a, b, msg)
        self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))


class StallMixin(object):
    def stall(self, res=None, delay=1):
        d = defer.Deferred()
        reactor.callLater(delay, d.callback, res)
        return d

class ShouldFailMixin(object):

    def shouldFail(self, expected_failure, which, substring,
                   callable, *args, **kwargs):
        assert substring is None or isinstance(substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
        def done(res):
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                if substring:
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                # return the Failure for further analysis, but in a form that
                # doesn't make the Deferred chain think that we failed.
                return [res]
            else:
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
        d.addBoth(done)
        return d


class TestMixin(SignalMixin):
    def setUp(self):
@@ -187,3 +151,11 @@ except ImportError:
        os.chmod(path, stat.S_IWRITE | stat.S_IEXEC | stat.S_IREAD)
    make_readonly = _make_readonly
    make_accessible = _make_accessible


__all__ = [
    "make_readonly", "make_accessible", "TestMixin", "ShouldFailMixin",
    "StallMixin", "skip_if_cannot_represent_argv", "run_cli", "parse_cli",
    "DevNullDictionary", "insecurerandstr", "flip_bit", "flip_one_bit",
    "SignalMixin", "skip_if_cannot_represent_filename", "ReallyEqualMixin"
]

37 src/allmydata/test/python3_tests.py Normal file
@@ -0,0 +1,37 @@
"""
This module defines the subset of the full test suite which is expected to
pass on Python 3 in a way which makes that suite discoverable by trial.

This module has been ported to Python 3.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

from twisted.python.reflect import (
    namedModule,
)
from twisted.trial.runner import (
    TestLoader,
)
from twisted.trial.unittest import (
    TestSuite,
)

from allmydata.util._python3 import (
    PORTED_TEST_MODULES,
)

def testSuite():
    loader = TestLoader()
    return TestSuite(list(
        loader.loadModule(namedModule(module))
        for module
        in PORTED_TEST_MODULES
    ))
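trial discovers this subset through the module-level `testSuite` callable, which twisted.trial's TestLoader honors when loading a module; presumably the CI job added above then runs it by naming the module on the command line, along the lines of:

    trial allmydata.test.python3_tests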
@@ -1,10 +1,25 @@

import json
import os.path, shutil

from bs4 import BeautifulSoup

from twisted.trial import unittest
from twisted.internet import defer

from nevow.inevow import IRequest
from zope.interface import implementer
from twisted.web.server import Request
from twisted.web.test.requesthelper import DummyChannel
from twisted.web.template import flattenString

from allmydata import check_results, uri
from allmydata import uri as tahoe_uri
from allmydata.interfaces import (
    IServer,
    ICheckResults,
    ICheckAndRepairResults,
)
from allmydata.util import base32
from allmydata.web import check_results as web_check_results
from allmydata.storage_client import StorageFarmBroker, NativeStorageServer
@@ -12,18 +27,115 @@ from allmydata.storage.server import storage_index_to_dir
from allmydata.monitor import Monitor
from allmydata.test.no_network import GridTestMixin
from allmydata.immutable.upload import Data
from allmydata.test.common_web import WebRenderingMixin
from allmydata.mutable.publish import MutableData

from .common import (
    EMPTY_CLIENT_CONFIG,
)

from .web.common import (
    assert_soup_has_favicon,
    assert_soup_has_tag_with_content,
)

class FakeClient(object):
    def get_storage_broker(self):
        return self.storage_broker

class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
@implementer(IRequest)
class TestRequest(Request, object):
    """
    A minimal Request class to use in tests.

    XXX: We have to have this class because `common.get_arg()` expects
    a `nevow.inevow.IRequest`, which `twisted.web.server.Request`
    isn't. The request needs to have `args`, `fields`, `prepath`, and
    `postpath` properties so that `allmydata.web.common.get_arg()`
    won't complain.
    """
    def __init__(self, args=None, fields=None):
        super(TestRequest, self).__init__(DummyChannel())
        self.args = args or {}
        self.fields = fields or {}
        self.prepath = [b""]
        self.postpath = [b""]


@implementer(IServer)
class FakeServer(object):

    def get_name(self):
        return "fake name"

    def get_longname(self):
        return "fake longname"

    def get_nickname(self):
        return "fake nickname"


@implementer(ICheckResults)
class FakeCheckResults(object):

    def __init__(self, si=None,
                 healthy=False, recoverable=False,
                 summary="fake summary"):
        self._storage_index = si
        self._is_healthy = healthy
        self._is_recoverable = recoverable
        self._summary = summary

    def get_storage_index(self):
        return self._storage_index

    def get_storage_index_string(self):
        return base32.b2a_or_none(self._storage_index)

    def is_healthy(self):
        return self._is_healthy

    def is_recoverable(self):
        return self._is_recoverable

    def get_summary(self):
        return self._summary

    def get_corrupt_shares(self):
        # returns a list of (IServer, storage_index, sharenum)
        return [(FakeServer(), "<fake-si>", 0)]


@implementer(ICheckAndRepairResults)
class FakeCheckAndRepairResults(object):

    def __init__(self, si=None,
                 repair_attempted=False,
                 repair_success=False):
        self._storage_index = si
        self._repair_attempted = repair_attempted
        self._repair_success = repair_success

    def get_storage_index(self):
        return self._storage_index

    def get_pre_repair_results(self):
        return FakeCheckResults()

    def get_post_repair_results(self):
        return FakeCheckResults()

    def get_repair_attempted(self):
        return self._repair_attempted

    def get_repair_successful(self):
        return self._repair_success


class WebResultsRendering(unittest.TestCase):

    @staticmethod
    def remove_tags(html):
        return BeautifulSoup(html).get_text(separator=" ")

    def create_fake_client(self):
        sb = StorageFarmBroker(True, None, EMPTY_CLIENT_CONFIG)
@@ -51,34 +163,31 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
        c.storage_broker = sb
        return c

    def render_json(self, page):
        d = self.render1(page, args={"output": ["json"]})
        return d
    def render_json(self, resource):
        return resource.render(TestRequest(args={"output": ["json"]}))

    def render_element(self, element, args=None):
        d = flattenString(TestRequest(args), element)
        return unittest.TestCase().successResultOf(d)

    def test_literal(self):
        lcr = web_check_results.LiteralCheckResultsRendererElement()

        html = self.render_element(lcr)
        self.failUnlessIn("Literal files are always healthy", html)

        html = self.render_element(lcr, args={"return_to": ["FOOURL"]})
        self.failUnlessIn("Literal files are always healthy", html)
        self.failUnlessIn('<a href="FOOURL">Return to file.</a>', html)

        c = self.create_fake_client()
        lcr = web_check_results.LiteralCheckResultsRenderer(c)

        d = self.render1(lcr)
        def _check(html):
            s = self.remove_tags(html)
            self.failUnlessIn("Literal files are always healthy", s)
        d.addCallback(_check)
        d.addCallback(lambda ignored:
                      self.render1(lcr, args={"return_to": ["FOOURL"]}))
        def _check_return_to(html):
            s = self.remove_tags(html)
            self.failUnlessIn("Literal files are always healthy", s)
            self.failUnlessIn('<a href="FOOURL">Return to file.</a>',
                              html)
        d.addCallback(_check_return_to)
        d.addCallback(lambda ignored: self.render_json(lcr))
        def _check_json(js):
            j = json.loads(js)
            self.failUnlessEqual(j["storage-index"], "")
            self.failUnlessEqual(j["results"]["healthy"], True)
        d.addCallback(_check_json)
        return d
        js = self.render_json(lcr)
        j = json.loads(js)
        self.failUnlessEqual(j["storage-index"], "")
        self.failUnlessEqual(j["results"]["healthy"], True)


    def test_check(self):
        c = self.create_fake_client()
@@ -108,8 +217,8 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                                        healthy=True, recoverable=True,
                                        summary="groovy",
                                        **data)
        w = web_check_results.CheckResultsRenderer(c, cr)
        html = self.render2(w)
        w = web_check_results.CheckResultsRendererElement(c, cr)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Healthy : groovy", s)
@@ -120,14 +229,14 @@
        self.failUnlessIn("Wrong Shares: 0", s)
        self.failUnlessIn("Recoverable Versions: 1", s)
        self.failUnlessIn("Unrecoverable Versions: 0", s)
        self.failUnlessIn("Good Shares (sorted in share order): Share ID Nickname Node ID shareid1 peer-0 00000000 peer-f ffffffff", s)
        self.failUnlessIn("Good Shares (sorted in share order): Share ID Nickname Node ID shareid1 peer-0 00000000 peer-f ffffffff", s)

        cr = check_results.CheckResults(u, u.get_storage_index(),
                                        healthy=False, recoverable=True,
                                        summary="ungroovy",
                                        **data)
        w = web_check_results.CheckResultsRenderer(c, cr)
        html = self.render2(w)
        w = web_check_results.CheckResultsRendererElement(c, cr)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Healthy! : ungroovy", s)
@@ -138,22 +247,23 @@
                                        healthy=False, recoverable=False,
                                        summary="rather dead",
                                        **data)
        w = web_check_results.CheckResultsRenderer(c, cr)
        html = self.render2(w)
        w = web_check_results.CheckResultsRendererElement(c, cr)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)
        self.failUnlessIn("Corrupt shares: Share ID Nickname Node ID sh#2 peer-0 00000000", s)
        self.failUnlessIn("Corrupt shares: Share ID Nickname Node ID sh#2 peer-0 00000000", s)

        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)

        html = self.render2(w, args={"return_to": ["FOOURL"]})
        html = self.render_element(w, args={"return_to": ["FOOURL"]})
        self.failUnlessIn('<a href="FOOURL">Return to file/directory.</a>',
                          html)

        w = web_check_results.CheckResultsRenderer(c, cr)
        d = self.render_json(w)
        def _check_json(jdata):
            j = json.loads(jdata)
@@ -178,15 +288,15 @@
            'recoverable': False,
            }
        self.failUnlessEqual(j["results"], expected)
        d.addCallback(_check_json)
        d.addCallback(lambda ignored: self.render1(w))
        _check_json(d)

        w = web_check_results.CheckResultsRendererElement(c, cr)
        d = self.render_element(w)
        def _check(html):
            s = self.remove_tags(html)
            self.failUnlessIn("File Check Results for SI=2k6avp", s)
            self.failUnlessIn("Not Recoverable! : rather dead", s)
        d.addCallback(_check)
        return d

        _check(html)

    def test_check_and_repair(self):
        c = self.create_fake_client()
@@ -244,8 +354,8 @@
        crr.post_repair_results = post_cr
        crr.repair_attempted = False

        w = web_check_results.CheckAndRepairResultsRenderer(c, crr)
        html = self.render2(w)
        w = web_check_results.CheckAndRepairResultsRendererElement(c, crr)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
@@ -256,7 +366,7 @@

        crr.repair_attempted = True
        crr.repair_successful = True
        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
@@ -271,7 +381,7 @@
                                         summary="better",
                                         **data)
        crr.post_repair_results = post_cr
        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
@@ -286,7 +396,7 @@
                                         summary="worse",
                                         **data)
        crr.post_repair_results = post_cr
        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
@@ -294,24 +404,218 @@
        self.failUnlessIn("Repair unsuccessful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)

        d = self.render_json(w)
        def _got_json(data):
            j = json.loads(data)
            self.failUnlessEqual(j["repair-attempted"], True)
            self.failUnlessEqual(j["storage-index"],
                                 "2k6avpjga3dho3zsjo6nnkt7n4")
            self.failUnlessEqual(j["pre-repair-results"]["summary"], "illing")
            self.failUnlessEqual(j["post-repair-results"]["summary"], "worse")
        d.addCallback(_got_json)
        w = web_check_results.CheckAndRepairResultsRenderer(c, crr)
        j = json.loads(self.render_json(w))
        self.failUnlessEqual(j["repair-attempted"], True)
        self.failUnlessEqual(j["storage-index"],
                             "2k6avpjga3dho3zsjo6nnkt7n4")
        self.failUnlessEqual(j["pre-repair-results"]["summary"], "illing")
        self.failUnlessEqual(j["post-repair-results"]["summary"], "worse")

        w = web_check_results.CheckAndRepairResultsRenderer(c, None)
        j = json.loads(self.render_json(w))
        self.failUnlessEqual(j["repair-attempted"], False)
        self.failUnlessEqual(j["storage-index"], "")


    def test_deep_check_renderer(self):
        status = check_results.DeepCheckResults("fake-root-si")
        status.add_check(
            FakeCheckResults("<unhealthy/unrecoverable>", False, False),
            (u"fake", u"unhealthy", u"unrecoverable")
        )
        status.add_check(
            FakeCheckResults("<healthy/recoverable>", True, True),
            (u"fake", u"healthy", u"recoverable")
        )
        status.add_check(
            FakeCheckResults("<healthy/unrecoverable>", True, False),
            (u"fake", u"healthy", u"unrecoverable")
        )
        status.add_check(
            FakeCheckResults("<unhealthy/unrecoverable>", False, True),
            (u"fake", u"unhealthy", u"recoverable")
        )

        monitor = Monitor()
        monitor.set_status(status)

        elem = web_check_results.DeepCheckResultsRendererElement(monitor)
        doc = self.render_element(elem)
        soup = BeautifulSoup(doc, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Deep Check Results"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h1",
            "Deep-Check Results for root SI="
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Checked: 4"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Healthy: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unhealthy: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unrecoverable: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Corrupt Shares: 4"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Files/Directories That Had Problems:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"fake/unhealthy/recoverable: fake summary"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"fake/unhealthy/unrecoverable: fake summary"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Servers on which corrupt shares were found"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Corrupt Shares"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"All Results"
        )

    def test_deep_check_and_repair_renderer(self):
        status = check_results.DeepCheckAndRepairResults("")

        status.add_check_and_repair(
            FakeCheckAndRepairResults("attempted/success", True, True),
            (u"attempted", u"success")
        )
        status.add_check_and_repair(
            FakeCheckAndRepairResults("attempted/failure", True, False),
            (u"attempted", u"failure")
        )
        status.add_check_and_repair(
            FakeCheckAndRepairResults("unattempted/failure", False, False),
            (u"unattempted", u"failure")
        )

        monitor = Monitor()
        monitor.set_status(status)

        elem = web_check_results.DeepCheckAndRepairResultsRendererElement(monitor)
        doc = self.render_element(elem)
        soup = BeautifulSoup(doc, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Deep Check Results"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h1",
            u"Deep-Check-And-Repair Results for root SI="
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Checked: 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Healthy (before repair): 0"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unhealthy (before repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Corrupt Shares (before repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Repairs Attempted: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Repairs Successful: 1"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            "Repairs Unsuccessful: 1"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Healthy (after repair): 0"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unhealthy (after repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Corrupt Shares (after repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Files/Directories That Had Problems:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Files/Directories That Still Have Problems:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Servers on which corrupt shares were found"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Remaining Corrupt Shares"
        )

        w2 = web_check_results.CheckAndRepairResultsRenderer(c, None)
        d.addCallback(lambda ignored: self.render_json(w2))
        def _got_lit_results(data):
            j = json.loads(data)
            self.failUnlessEqual(j["repair-attempted"], False)
            self.failUnlessEqual(j["storage-index"], "")
        d.addCallback(_got_lit_results)
        return d

class BalancingAct(GridTestMixin, unittest.TestCase):
    # test for #1115 regarding the 'count-good-share-hosts' metric

@@ -1,4 +1,14 @@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from future.utils import PY2, PY3
if PY2:
    # We don't import str because omg way too ambiguous in this context.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min  # noqa: F401

from past.builtins import unicode

lumiere_nfc = u"lumi\u00E8re"
Artonwall_nfc = u"\u00C4rtonwall.mp3"
@@ -43,8 +53,10 @@ if __name__ == "__main__":
    for fname in TEST_FILENAMES:
        open(os.path.join(tmpdir, fname), 'w').close()

    # Use Unicode API under Windows or MacOS X
    if sys.platform in ('win32', 'darwin'):
    # On Python 2, listing directories returns unicode under Windows or
    # MacOS X if the input is unicode. On Python 3, it always returns
    # Unicode.
    if PY2 and sys.platform in ('win32', 'darwin'):
        dirlist = os.listdir(unicode(tmpdir))
    else:
        dirlist = os.listdir(tmpdir)
@@ -59,20 +71,22 @@ if __name__ == "__main__":

import os, sys, locale
from unittest import skipIf

from twisted.trial import unittest

from twisted.python.filepath import FilePath

from allmydata.test.common_util import ReallyEqualMixin
from allmydata.test.common_py3 import (
    ReallyEqualMixin, skip_if_cannot_represent_filename,
)
from allmydata.util import encodingutil, fileutil
from allmydata.util.encodingutil import argv_to_unicode, unicode_to_url, \
    unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
    quote_filepath, unicode_platform, listdir_unicode, FilenameEncodingError, \
    get_io_encoding, get_filesystem_encoding, to_str, from_utf8_or_none, _reload, \
    to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from
from allmydata.dirnode import normalize
from .common_util import skip_if_cannot_represent_filename
    to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \
    unicode_to_argv
from twisted.python import usage

@@ -90,7 +104,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):

        mock_stdout.encoding = 'cp65001'
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
        self.assertEqual(get_io_encoding(), 'utf-8')

        mock_stdout.encoding = 'koi8-r'
        expected = sys.platform == "win32" and 'utf-8' or 'koi8-r'
@@ -122,7 +136,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):

        preferredencoding = None
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
        self.assertEqual(get_io_encoding(), 'utf-8')

    def test_argv_to_unicode(self):
        encodingutil.io_encoding = 'utf-8'
@@ -150,6 +164,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):

# The following tests apply only to platforms that don't store filenames as
# Unicode entities on the filesystem.
class EncodingUtilNonUnicodePlatform(unittest.TestCase):
    @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
    def setUp(self):
        # Mock sys.platform because unicode_platform() uses it
        self.original_platform = sys.platform
@@ -211,7 +226,7 @@ class EncodingUtil(ReallyEqualMixin):
        self.failUnlessReallyEqual(argv_to_unicode(argv), argu)

    def test_unicode_to_url(self):
        self.failUnless(unicode_to_url(lumiere_nfc), "lumi\xc3\xa8re")
        self.failUnless(unicode_to_url(lumiere_nfc), b"lumi\xc3\xa8re")

    def test_unicode_to_output(self):
        if 'argv' not in dir(self):
@@ -224,7 +239,18 @@ class EncodingUtil(ReallyEqualMixin):
        _reload()
        self.failUnlessReallyEqual(unicode_to_output(lumiere_nfc), self.argv)

    def test_unicode_platform(self):
    @skipIf(PY3, "Python 2 only.")
    def test_unicode_to_argv_py2(self):
        """unicode_to_argv() converts to bytes on Python 2."""
        self.assertEqual(unicode_to_argv("abc"), u"abc".encode(self.io_encoding))

    @skipIf(PY2, "Python 3 only.")
    def test_unicode_to_argv_py3(self):
        """unicode_to_argv() is noop on Python 3."""
        self.assertEqual(unicode_to_argv("abc"), "abc")
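A rough sketch of the contract these two tests pin down (assuming io_encoding is 'utf-8'; not additional API):

    from allmydata.util.encodingutil import unicode_to_argv
    out = unicode_to_argv(u"abc")
    # Python 2: b"abc" (text encoded with io_encoding)
    # Python 3: u"abc" returned unchanged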
    @skipIf(PY3, "Python 2 only.")
    def test_unicode_platform_py2(self):
        matrix = {
            'linux2': False,
            'linux3': False,
@@ -236,6 +262,11 @@ class EncodingUtil(ReallyEqualMixin):
        _reload()
        self.failUnlessReallyEqual(unicode_platform(), matrix[self.platform])

    @skipIf(PY2, "Python 3 isn't Python 2.")
    def test_unicode_platform_py3(self):
        _reload()
        self.failUnlessReallyEqual(unicode_platform(), True)

    def test_listdir_unicode(self):
        if 'dirlist' not in dir(self):
            return
@@ -248,7 +279,14 @@ class EncodingUtil(ReallyEqualMixin):
                          % (self.filesystem_encoding,))

        def call_os_listdir(path):
            return self.dirlist
            if PY2:
                return self.dirlist
            else:
                # Python 3 always lists unicode filenames:
                return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
                        else d
                        for d in self.dirlist]

        self.patch(os, 'listdir', call_os_listdir)

        def call_sys_getfilesystemencoding():
@@ -258,7 +296,7 @@ class EncodingUtil(ReallyEqualMixin):
        _reload()
        filenames = listdir_unicode(u'/dummy')

        self.failUnlessEqual(set([normalize(fname) for fname in filenames]),
        self.failUnlessEqual(set([encodingutil.normalize(fname) for fname in filenames]),
                             set(TEST_FILENAMES))


@@ -278,12 +316,16 @@ class StdlibUnicode(unittest.TestCase):
        fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
        open(fn, 'wb').close()
        self.failUnless(os.path.exists(fn))
        self.failUnless(os.path.exists(os.path.join(os.getcwdu(), fn)))
        if PY2:
            getcwdu = os.getcwdu
        else:
            getcwdu = os.getcwd
        self.failUnless(os.path.exists(os.path.join(getcwdu(), fn)))
        filenames = listdir_unicode(lumiere_nfc)

        # We only require that the listing includes a filename that is canonically equivalent
        # to lumiere_nfc (on Mac OS X, it will be the NFD equivalent).
        self.failUnlessIn(lumiere_nfc + ".txt", set([normalize(fname) for fname in filenames]))
        self.failUnlessIn(lumiere_nfc + u".txt", set([encodingutil.normalize(fname) for fname in filenames]))

        expanded = fileutil.expanduser(u"~/" + lumiere_nfc)
        self.failIfIn(u"~", expanded)
@@ -314,59 +356,70 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
        self.failUnlessReallyEqual(quote_output(inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
        if out[0:2] == 'b"':
            pass
        elif isinstance(inp, str):
            self.failUnlessReallyEqual(quote_output(unicode(inp), encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(unicode(inp), encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
        elif isinstance(inp, bytes):
            try:
                unicode_inp = inp.decode("utf-8")
            except UnicodeDecodeError:
                # Some things decode on Python 2, but not Python 3...
                return
            self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
        else:
            self.failUnlessReallyEqual(quote_output(inp.encode('utf-8'), encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(inp.encode('utf-8'), encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
            try:
                bytes_inp = inp.encode('utf-8')
            except UnicodeEncodeError:
                # Some things encode on Python 2, but not Python 3, e.g.
                # surrogates like u"\uDC00\uD800"...
                return
            self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)

    def _test_quote_output_all(self, enc):
        def check(inp, out, optional_quotes=False, quote_newlines=None):
            self._check(inp, out, enc, optional_quotes, quote_newlines)

        # optional single quotes
        check("foo", "'foo'", True)
        check("\\", "'\\'", True)
        check("$\"`", "'$\"`'", True)
        check("\n", "'\n'", True, quote_newlines=False)
        check(b"foo", b"'foo'", True)
        check(b"\\", b"'\\'", True)
        check(b"$\"`", b"'$\"`'", True)
        check(b"\n", b"'\n'", True, quote_newlines=False)

        # mandatory single quotes
        check("\"", "'\"'")
        check(b"\"", b"'\"'")

        # double quotes
        check("'", "\"'\"")
        check("\n", "\"\\x0a\"", quote_newlines=True)
        check("\x00", "\"\\x00\"")
        check(b"'", b"\"'\"")
        check(b"\n", b"\"\\x0a\"", quote_newlines=True)
        check(b"\x00", b"\"\\x00\"")

        # invalid Unicode and astral planes
        check(u"\uFDD0\uFDEF", "\"\\ufdd0\\ufdef\"")
        check(u"\uDC00\uD800", "\"\\udc00\\ud800\"")
        check(u"\uDC00\uD800\uDC00", "\"\\udc00\\U00010000\"")
        check(u"\uD800\uDC00", "\"\\U00010000\"")
        check(u"\uD800\uDC01", "\"\\U00010001\"")
        check(u"\uD801\uDC00", "\"\\U00010400\"")
        check(u"\uDBFF\uDFFF", "\"\\U0010ffff\"")
        check(u"'\uDBFF\uDFFF", "\"'\\U0010ffff\"")
        check(u"\"\uDBFF\uDFFF", "\"\\\"\\U0010ffff\"")
        check(u"\uFDD0\uFDEF", b"\"\\ufdd0\\ufdef\"")
        check(u"\uDC00\uD800", b"\"\\udc00\\ud800\"")
        check(u"\uDC00\uD800\uDC00", b"\"\\udc00\\U00010000\"")
        check(u"\uD800\uDC00", b"\"\\U00010000\"")
        check(u"\uD800\uDC01", b"\"\\U00010001\"")
        check(u"\uD801\uDC00", b"\"\\U00010400\"")
        check(u"\uDBFF\uDFFF", b"\"\\U0010ffff\"")
        check(u"'\uDBFF\uDFFF", b"\"'\\U0010ffff\"")
        check(u"\"\uDBFF\uDFFF", b"\"\\\"\\U0010ffff\"")

        # invalid UTF-8
        check("\xFF", "b\"\\xff\"")
        check("\x00\"$\\`\x80\xFF", "b\"\\x00\\\"\\$\\\\\\`\\x80\\xff\"")
        check(b"\xFF", b"b\"\\xff\"")
        check(b"\x00\"$\\`\x80\xFF", b"b\"\\x00\\\"\\$\\\\\\`\\x80\\xff\"")

    def test_quote_output_ascii(self, enc='ascii'):
        def check(inp, out, optional_quotes=False, quote_newlines=None):
            self._check(inp, out, enc, optional_quotes, quote_newlines)

        self._test_quote_output_all(enc)
        check(u"\u00D7", "\"\\xd7\"")
        check(u"'\u00D7", "\"'\\xd7\"")
        check(u"\"\u00D7", "\"\\\"\\xd7\"")
        check(u"\u2621", "\"\\u2621\"")
        check(u"'\u2621", "\"'\\u2621\"")
        check(u"\"\u2621", "\"\\\"\\u2621\"")
        check(u"\n", "'\n'", True, quote_newlines=False)
        check(u"\n", "\"\\x0a\"", quote_newlines=True)
        check(u"\u00D7", b"\"\\xd7\"")
        check(u"'\u00D7", b"\"'\\xd7\"")
        check(u"\"\u00D7", b"\"\\\"\\xd7\"")
        check(u"\u2621", b"\"\\u2621\"")
        check(u"'\u2621", b"\"'\\u2621\"")
        check(u"\"\u2621", b"\"\\\"\\u2621\"")
        check(u"\n", b"'\n'", True, quote_newlines=False)
        check(u"\n", b"\"\\x0a\"", quote_newlines=True)

    def test_quote_output_latin1(self, enc='latin1'):
        def check(inp, out, optional_quotes=False, quote_newlines=None):
@@ -411,43 +464,43 @@ def win32_other(win32, other):

class QuotePaths(ReallyEqualMixin, unittest.TestCase):
    def test_quote_path(self):
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar']), "'foo/bar'")
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=True), "'foo/bar'")
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=False), "foo/bar")
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar']), '"foo/\\x0abar"')
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), '"foo/\\x0abar"')
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=False), '"foo/\\x0abar"')
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar']), b"'foo/bar'")
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=True), b"'foo/bar'")
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=False), b"foo/bar")
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar']), b'"foo/\\x0abar"')
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), b'"foo/\\x0abar"')
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=False), b'"foo/\\x0abar"')

        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo"),
                                   win32_other("'C:\\foo'", "'\\\\?\\C:\\foo'"))
                                   win32_other(b"'C:\\foo'", b"'\\\\?\\C:\\foo'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=True),
                                   win32_other("'C:\\foo'", "'\\\\?\\C:\\foo'"))
                                   win32_other(b"'C:\\foo'", b"'\\\\?\\C:\\foo'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=False),
                                   win32_other("C:\\foo", "\\\\?\\C:\\foo"))
                                   win32_other(b"C:\\foo", b"\\\\?\\C:\\foo"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar"),
                                   win32_other("'\\\\foo\\bar'", "'\\\\?\\UNC\\foo\\bar'"))
                                   win32_other(b"'\\\\foo\\bar'", b"'\\\\?\\UNC\\foo\\bar'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=True),
                                   win32_other("'\\\\foo\\bar'", "'\\\\?\\UNC\\foo\\bar'"))
                                   win32_other(b"'\\\\foo\\bar'", b"'\\\\?\\UNC\\foo\\bar'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=False),
                                   win32_other("\\\\foo\\bar", "\\\\?\\UNC\\foo\\bar"))
                                   win32_other(b"\\\\foo\\bar", b"\\\\?\\UNC\\foo\\bar"))

    def test_quote_filepath(self):
        foo_bar_fp = FilePath(win32_other(u'C:\\foo\\bar', u'/foo/bar'))
        self.failUnlessReallyEqual(quote_filepath(foo_bar_fp),
                                   win32_other("'C:\\foo\\bar'", "'/foo/bar'"))
                                   win32_other(b"'C:\\foo\\bar'", b"'/foo/bar'"))
        self.failUnlessReallyEqual(quote_filepath(foo_bar_fp, quotemarks=True),
                                   win32_other("'C:\\foo\\bar'", "'/foo/bar'"))
                                   win32_other(b"'C:\\foo\\bar'", b"'/foo/bar'"))
        self.failUnlessReallyEqual(quote_filepath(foo_bar_fp, quotemarks=False),
                                   win32_other("C:\\foo\\bar", "/foo/bar"))
                                   win32_other(b"C:\\foo\\bar", b"/foo/bar"))

        if sys.platform == "win32":
            foo_longfp = FilePath(u'\\\\?\\C:\\foo')
            self.failUnlessReallyEqual(quote_filepath(foo_longfp),
                                       "'C:\\foo'")
                                       b"'C:\\foo'")
            self.failUnlessReallyEqual(quote_filepath(foo_longfp, quotemarks=True),
                                       "'C:\\foo'")
                                       b"'C:\\foo'")
            self.failUnlessReallyEqual(quote_filepath(foo_longfp, quotemarks=False),
                                       "C:\\foo")
                                       b"C:\\foo")
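Condensed, the quoting rules these assertions exercise (expected values mirror the tests above; a sketch, not a spec):

    from allmydata.util.encodingutil import quote_path
    assert quote_path([u'foo', u'bar']) == b"'foo/bar'"         # plain names: single quotes
    assert quote_path([u'foo', u'bar'], quotemarks=False) == b"foo/bar"
    assert quote_path([u'foo', u'\nbar']) == b'"foo/\\x0abar"'  # newline forces double quotes + escape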
class FilePaths(ReallyEqualMixin, unittest.TestCase):
@@ -501,23 +554,23 @@ class FilePaths(ReallyEqualMixin, unittest.TestCase):

class UbuntuKarmicUTF8(EncodingUtil, unittest.TestCase):
    uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
    argv = 'lumi\xc3\xa8re'
    argv = b'lumi\xc3\xa8re'
    platform = 'linux2'
    filesystem_encoding = 'UTF-8'
    io_encoding = 'UTF-8'
    dirlist = ['test_file', '\xc3\x84rtonwall.mp3', 'Blah blah.txt']
    dirlist = [b'test_file', b'\xc3\x84rtonwall.mp3', b'Blah blah.txt']

class UbuntuKarmicLatin1(EncodingUtil, unittest.TestCase):
    uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
    argv = 'lumi\xe8re'
    argv = b'lumi\xe8re'
    platform = 'linux2'
    filesystem_encoding = 'ISO-8859-1'
    io_encoding = 'ISO-8859-1'
    dirlist = ['test_file', 'Blah blah.txt', '\xc4rtonwall.mp3']
    dirlist = [b'test_file', b'Blah blah.txt', b'\xc4rtonwall.mp3']

class Windows(EncodingUtil, unittest.TestCase):
    uname = 'Windows XP 5.1.2600 x86 x86 Family 15 Model 75 Stepping 2, AuthenticAMD'
    argv = 'lumi\xc3\xa8re'
    argv = b'lumi\xc3\xa8re'
    platform = 'win32'
    filesystem_encoding = 'mbcs'
    io_encoding = 'utf-8'
@@ -525,7 +578,7 @@ class Windows(EncodingUtil, unittest.TestCase):

class MacOSXLeopard(EncodingUtil, unittest.TestCase):
    uname = 'Darwin g5.local 9.8.0 Darwin Kernel Version 9.8.0: Wed Jul 15 16:57:01 PDT 2009; root:xnu-1228.15.4~1/RELEASE_PPC Power Macintosh powerpc'
    output = 'lumi\xc3\xa8re'
    output = b'lumi\xc3\xa8re'
    platform = 'darwin'
    filesystem_encoding = 'utf-8'
    io_encoding = 'UTF-8'
@@ -548,14 +601,14 @@ class OpenBSD(EncodingUtil, unittest.TestCase):

class TestToFromStr(ReallyEqualMixin, unittest.TestCase):
    def test_to_str(self):
        self.failUnlessReallyEqual(to_str("foo"), "foo")
        self.failUnlessReallyEqual(to_str("lumi\xc3\xa8re"), "lumi\xc3\xa8re")
        self.failUnlessReallyEqual(to_str("\xFF"), "\xFF")  # passes through invalid UTF-8 -- is this what we want?
        self.failUnlessReallyEqual(to_str(u"lumi\u00E8re"), "lumi\xc3\xa8re")
        self.failUnlessReallyEqual(to_str(b"foo"), b"foo")
        self.failUnlessReallyEqual(to_str(b"lumi\xc3\xa8re"), b"lumi\xc3\xa8re")
        self.failUnlessReallyEqual(to_str(b"\xFF"), b"\xFF")  # passes through invalid UTF-8 -- is this what we want?
        self.failUnlessReallyEqual(to_str(u"lumi\u00E8re"), b"lumi\xc3\xa8re")
        self.failUnlessReallyEqual(to_str(None), None)

    def test_from_utf8_or_none(self):
        self.failUnlessRaises(AssertionError, from_utf8_or_none, u"foo")
        self.failUnlessReallyEqual(from_utf8_or_none("lumi\xc3\xa8re"), u"lumi\u00E8re")
        self.failUnlessReallyEqual(from_utf8_or_none(b"lumi\xc3\xa8re"), u"lumi\u00E8re")
        self.failUnlessReallyEqual(from_utf8_or_none(None), None)
        self.failUnlessRaises(UnicodeDecodeError, from_utf8_or_none, "\xFF")
        self.failUnlessRaises(UnicodeDecodeError, from_utf8_or_none, b"\xFF")

@@ -1,4 +1,10 @@
# -*- coding: utf-8 -*-
"""
Tests for allmydata.immutable.happiness_upload and
allmydata.util.happinessutil.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
@@ -13,12 +19,17 @@ if PY2:
from twisted.trial import unittest
from hypothesis import given
from hypothesis.strategies import text, sets

from allmydata.immutable import happiness_upload
from allmydata.util.happinessutil import servers_of_happiness, \
    shares_by_server, merge_servers
from allmydata.test.common_py3 import ShouldFailMixin


class HappinessUtils(unittest.TestCase):
class HappinessUploadUtils(unittest.TestCase):
    """
    test-cases for utility functions augmenting_path_for and residual_network
    test-cases for happiness_upload utility functions augmenting_path_for and
    residual_network.
    """

    def test_residual_0(self):
@@ -279,3 +290,192 @@ class PlacementTests(unittest.TestCase):
        # peers; if we have fewer shares than peers happiness is capped at
        # # of peers.
        assert happiness == min(len(peers), len(shares))


class FakeServerTracker(object):
    def __init__(self, serverid, buckets):
        self._serverid = serverid
        self.buckets = buckets
    def get_serverid(self):
        return self._serverid


class HappinessUtilTests(unittest.TestCase, ShouldFailMixin):
    """Tests for happinessutil.py."""

    def test_merge_servers(self):
        # merge_servers merges a list of upload_servers and a dict of
        # shareid -> serverid mappings.
        shares = {
            1 : set(["server1"]),
            2 : set(["server2"]),
            3 : set(["server3"]),
            4 : set(["server4", "server5"]),
            5 : set(["server1", "server2"]),
        }
        # if not provided with an upload_servers argument, it should just
        # return the first argument unchanged.
        self.failUnlessEqual(shares, merge_servers(shares, set([])))
        trackers = []
        for (i, server) in [(i, "server%d" % i) for i in range(5, 9)]:
            t = FakeServerTracker(server, [i])
            trackers.append(t)
        expected = {
            1 : set(["server1"]),
            2 : set(["server2"]),
            3 : set(["server3"]),
            4 : set(["server4", "server5"]),
            5 : set(["server1", "server2", "server5"]),
            6 : set(["server6"]),
            7 : set(["server7"]),
            8 : set(["server8"]),
        }
        self.failUnlessEqual(expected, merge_servers(shares, set(trackers)))
        shares2 = {}
        expected = {
            5 : set(["server5"]),
            6 : set(["server6"]),
            7 : set(["server7"]),
            8 : set(["server8"]),
        }
        self.failUnlessEqual(expected, merge_servers(shares2, set(trackers)))
        shares3 = {}
        trackers = []
        expected = {}
        for (i, server) in [(i, "server%d" % i) for i in range(10)]:
            shares3[i] = set([server])
            t = FakeServerTracker(server, [i])
            trackers.append(t)
            expected[i] = set([server])
        self.failUnlessEqual(expected, merge_servers(shares3, set(trackers)))
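For orientation, a minimal reimplementation of the behaviour this test relies on (a sketch, not the code under test; merge_servers_sketch is a hypothetical name):

    def merge_servers_sketch(servermap, upload_trackers):
        # fold each tracker's bucket (share) numbers into the
        # shnum -> set(serverid) map, leaving existing entries in place
        merged = dict((shnum, set(ids)) for shnum, ids in servermap.items())
        for tracker in upload_trackers:
            for shnum in tracker.buckets:
                merged.setdefault(shnum, set()).add(tracker.get_serverid())
        return merged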

    def test_servers_of_happiness_utility_function(self):
        # These tests are concerned with the servers_of_happiness()
        # utility function, and its underlying matching algorithm. Other
        # aspects of the servers_of_happiness behavior are tested
        # elsewhere. These tests exist to ensure that
        # servers_of_happiness doesn't under or overcount the happiness
        # value for given inputs.

        # servers_of_happiness expects a dict of
        # shnum => set(serverids) as a preexisting shares argument.
        test1 = {
            1 : set(["server1"]),
            2 : set(["server2"]),
            3 : set(["server3"]),
            4 : set(["server4"])
        }
        happy = servers_of_happiness(test1)
        self.failUnlessEqual(4, happy)
        test1[4] = set(["server1"])
        # We've added a duplicate server, so now servers_of_happiness
        # should be 3 instead of 4.
        happy = servers_of_happiness(test1)
        self.failUnlessEqual(3, happy)
        # The second argument of merge_servers should be a set of objects with
        # serverid and buckets as attributes. In actual use, these will be
        # ServerTracker instances, but for testing it is fine to make a
        # FakeServerTracker whose job is to hold those instance variables to
        # test that part.
        trackers = []
        for (i, server) in [(i, "server%d" % i) for i in range(5, 9)]:
            t = FakeServerTracker(server, [i])
            trackers.append(t)
        # Recall that test1 is a server layout with servers_of_happiness
        # = 3. Since there isn't any overlap between the shnum ->
        # set([serverid]) correspondences in test1 and those in trackers,
        # the result here should be 7.
        test2 = merge_servers(test1, set(trackers))
        happy = servers_of_happiness(test2)
        self.failUnlessEqual(7, happy)
        # Now add an overlapping server to trackers. This is redundant,
        # so it should not cause the previously reported happiness value
        # to change.
        t = FakeServerTracker("server1", [1])
        trackers.append(t)
        test2 = merge_servers(test1, set(trackers))
        happy = servers_of_happiness(test2)
        self.failUnlessEqual(7, happy)
        test = {}
        happy = servers_of_happiness(test)
        self.failUnlessEqual(0, happy)
        # Test a more substantial overlap between the trackers and the
        # existing assignments.
        test = {
            1 : set(['server1']),
            2 : set(['server2']),
            3 : set(['server3']),
            4 : set(['server4']),
        }
        trackers = []
        t = FakeServerTracker('server5', [4])
        trackers.append(t)
        t = FakeServerTracker('server6', [3, 5])
        trackers.append(t)
        # The value returned by servers_of_happiness is the size
        # of a maximum matching in the bipartite graph that
        # servers_of_happiness() makes between serverids and share
        # numbers. It should find something like this:
        # (server 1, share 1)
        # (server 2, share 2)
        # (server 3, share 3)
        # (server 5, share 4)
        # (server 6, share 5)
        #
        # and, since there are 5 edges in this matching, it should
        # return 5.
        test2 = merge_servers(test, set(trackers))
        happy = servers_of_happiness(test2)
        self.failUnlessEqual(5, happy)
        # Zooko's first puzzle:
        # (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:156)
        #
        # server 1: shares 0, 1
        # server 2: shares 1, 2
        # server 3: share 2
        #
        # This should yield happiness of 3.
        test = {
            0 : set(['server1']),
            1 : set(['server1', 'server2']),
            2 : set(['server2', 'server3']),
        }
        self.failUnlessEqual(3, servers_of_happiness(test))
        # Zooko's second puzzle:
        # (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:158)
        #
        # server 1: shares 0, 1
        # server 2: share 1
        #
        # This should yield happiness of 2.
        test = {
            0 : set(['server1']),
            1 : set(['server1', 'server2']),
        }
        self.failUnlessEqual(2, servers_of_happiness(test))
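Because servers_of_happiness() is defined as the size of a maximum matching in the server/share bipartite graph, the puzzle answers above can be cross-checked with a generic augmenting-path matcher. A self-contained sketch (Kuhn's algorithm; not the production code, which goes through a flow network):

    def max_matching_size(servermap):
        # servermap: shnum -> set(serverid)
        match = {}  # serverid -> shnum currently matched to it

        def try_assign(shnum, seen):
            for server in servermap.get(shnum, ()):
                if server in seen:
                    continue
                seen.add(server)
                # take a free server, or evict and re-place its share
                if server not in match or try_assign(match[server], seen):
                    match[server] = shnum
                    return True
            return False

        return sum(1 for shnum in servermap if try_assign(shnum, set()))

    # Zooko's first puzzle above: happiness should come out as 3
    assert max_matching_size({0: set(['server1']),
                              1: set(['server1', 'server2']),
                              2: set(['server2', 'server3'])}) == 3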

    def test_shares_by_server(self):
        test = dict([(i, set(["server%d" % i])) for i in range(1, 5)])
        sbs = shares_by_server(test)
        self.failUnlessEqual(set([1]), sbs["server1"])
        self.failUnlessEqual(set([2]), sbs["server2"])
        self.failUnlessEqual(set([3]), sbs["server3"])
        self.failUnlessEqual(set([4]), sbs["server4"])
        test1 = {
            1 : set(["server1"]),
            2 : set(["server1"]),
            3 : set(["server1"]),
            4 : set(["server2"]),
            5 : set(["server2"])
        }
        sbs = shares_by_server(test1)
        self.failUnlessEqual(set([1, 2, 3]), sbs["server1"])
        self.failUnlessEqual(set([4, 5]), sbs["server2"])
        # This should fail unless the serverid part of the mapping is a set
        test2 = {1: "server1"}
        self.shouldFail(AssertionError,
                        "test_shares_by_server",
                        "",
                        shares_by_server, test2)
@@ -15,17 +15,15 @@ from allmydata.util import log, base32
from allmydata.util.assertutil import precondition
from allmydata.util.deferredutil import DeferredListShouldSucceed
from allmydata.test.no_network import GridTestMixin
from allmydata.test.common_util import ShouldFailMixin
from allmydata.util.happinessutil import servers_of_happiness, \
    shares_by_server, merge_servers
from allmydata.test.common_py3 import ShouldFailMixin
from allmydata.storage_client import StorageFarmBroker
from allmydata.storage.server import storage_index_to_dir
from allmydata.client import _Client

from .common import (
    EMPTY_CLIENT_CONFIG,
)


MiB = 1024*1024

def extract_uri(results):
@@ -864,12 +862,6 @@ def is_happy_enough(servertoshnums, h, k):
            return False
    return True

class FakeServerTracker(object):
    def __init__(self, serverid, buckets):
        self._serverid = serverid
        self.buckets = buckets
    def get_serverid(self):
        return self._serverid

class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                          ShouldFailMixin):
@@ -1499,185 +1491,6 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                          self._do_upload_with_broken_servers, 2))
        return d

    def test_merge_servers(self):
        # merge_servers merges a list of upload_servers and a dict of
        # shareid -> serverid mappings.
        shares = {
            1 : set(["server1"]),
            2 : set(["server2"]),
            3 : set(["server3"]),
            4 : set(["server4", "server5"]),
            5 : set(["server1", "server2"]),
        }
        # if not provided with an upload_servers argument, it should just
        # return the first argument unchanged.
        self.failUnlessEqual(shares, merge_servers(shares, set([])))
        trackers = []
        for (i, server) in [(i, "server%d" % i) for i in xrange(5, 9)]:
            t = FakeServerTracker(server, [i])
            trackers.append(t)
        expected = {
            1 : set(["server1"]),
            2 : set(["server2"]),
            3 : set(["server3"]),
            4 : set(["server4", "server5"]),
            5 : set(["server1", "server2", "server5"]),
            6 : set(["server6"]),
            7 : set(["server7"]),
            8 : set(["server8"]),
        }
        self.failUnlessEqual(expected, merge_servers(shares, set(trackers)))
        shares2 = {}
        expected = {
            5 : set(["server5"]),
            6 : set(["server6"]),
            7 : set(["server7"]),
            8 : set(["server8"]),
        }
        self.failUnlessEqual(expected, merge_servers(shares2, set(trackers)))
        shares3 = {}
        trackers = []
        expected = {}
        for (i, server) in [(i, "server%d" % i) for i in xrange(10)]:
            shares3[i] = set([server])
            t = FakeServerTracker(server, [i])
            trackers.append(t)
            expected[i] = set([server])
        self.failUnlessEqual(expected, merge_servers(shares3, set(trackers)))

    def test_servers_of_happiness_utility_function(self):
        # These tests are concerned with the servers_of_happiness()
        # utility function, and its underlying matching algorithm. Other
        # aspects of the servers_of_happiness behavior are tested
        # elsewhere. These tests exist to ensure that
        # servers_of_happiness doesn't under or overcount the happiness
        # value for given inputs.

        # servers_of_happiness expects a dict of
        # shnum => set(serverids) as a preexisting shares argument.
        test1 = {
            1 : set(["server1"]),
            2 : set(["server2"]),
            3 : set(["server3"]),
            4 : set(["server4"])
        }
        happy = servers_of_happiness(test1)
        self.failUnlessEqual(4, happy)
        test1[4] = set(["server1"])
        # We've added a duplicate server, so now servers_of_happiness
        # should be 3 instead of 4.
        happy = servers_of_happiness(test1)
        self.failUnlessEqual(3, happy)
        # The second argument of merge_servers should be a set of objects with
        # serverid and buckets as attributes. In actual use, these will be
        # ServerTracker instances, but for testing it is fine to make a
        # FakeServerTracker whose job is to hold those instance variables to
        # test that part.
        trackers = []
        for (i, server) in [(i, "server%d" % i) for i in xrange(5, 9)]:
            t = FakeServerTracker(server, [i])
            trackers.append(t)
        # Recall that test1 is a server layout with servers_of_happiness
        # = 3. Since there isn't any overlap between the shnum ->
        # set([serverid]) correspondences in test1 and those in trackers,
        # the result here should be 7.
        test2 = merge_servers(test1, set(trackers))
        happy = servers_of_happiness(test2)
        self.failUnlessEqual(7, happy)
        # Now add an overlapping server to trackers. This is redundant,
        # so it should not cause the previously reported happiness value
        # to change.
        t = FakeServerTracker("server1", [1])
        trackers.append(t)
        test2 = merge_servers(test1, set(trackers))
        happy = servers_of_happiness(test2)
        self.failUnlessEqual(7, happy)
        test = {}
        happy = servers_of_happiness(test)
        self.failUnlessEqual(0, happy)
        # Test a more substantial overlap between the trackers and the
        # existing assignments.
        test = {
            1 : set(['server1']),
            2 : set(['server2']),
            3 : set(['server3']),
            4 : set(['server4']),
        }
        trackers = []
        t = FakeServerTracker('server5', [4])
        trackers.append(t)
        t = FakeServerTracker('server6', [3, 5])
        trackers.append(t)
        # The value returned by servers_of_happiness is the size
        # of a maximum matching in the bipartite graph that
        # servers_of_happiness() makes between serverids and share
        # numbers. It should find something like this:
        # (server 1, share 1)
        # (server 2, share 2)
        # (server 3, share 3)
        # (server 5, share 4)
        # (server 6, share 5)
        #
        # and, since there are 5 edges in this matching, it should
        # return 5.
        test2 = merge_servers(test, set(trackers))
        happy = servers_of_happiness(test2)
        self.failUnlessEqual(5, happy)
        # Zooko's first puzzle:
        # (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:156)
        #
        # server 1: shares 0, 1
        # server 2: shares 1, 2
        # server 3: share 2
        #
        # This should yield happiness of 3.
        test = {
            0 : set(['server1']),
            1 : set(['server1', 'server2']),
            2 : set(['server2', 'server3']),
        }
        self.failUnlessEqual(3, servers_of_happiness(test))
        # Zooko's second puzzle:
        # (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:158)
        #
        # server 1: shares 0, 1
        # server 2: share 1
        #
        # This should yield happiness of 2.
        test = {
            0 : set(['server1']),
            1 : set(['server1', 'server2']),
        }
        self.failUnlessEqual(2, servers_of_happiness(test))


    def test_shares_by_server(self):
        test = dict([(i, set(["server%d" % i])) for i in xrange(1, 5)])
        sbs = shares_by_server(test)
        self.failUnlessEqual(set([1]), sbs["server1"])
        self.failUnlessEqual(set([2]), sbs["server2"])
        self.failUnlessEqual(set([3]), sbs["server3"])
        self.failUnlessEqual(set([4]), sbs["server4"])
        test1 = {
            1 : set(["server1"]),
            2 : set(["server1"]),
            3 : set(["server1"]),
            4 : set(["server2"]),
            5 : set(["server2"])
        }
        sbs = shares_by_server(test1)
        self.failUnlessEqual(set([1, 2, 3]), sbs["server1"])
        self.failUnlessEqual(set([4, 5]), sbs["server2"])
        # This should fail unless the serverid part of the mapping is a set
        test2 = {1: "server1"}
        self.shouldFail(AssertionError,
                        "test_shares_by_server",
                        "",
                        shares_by_server, test2)


    def test_existing_share_detection(self):
        self.basedir = self.mktemp()
        d = self._setup_and_upload()

@@ -5,8 +5,6 @@ unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')

FAVICON_MARKUP = '<link href="/icon.png" rel="shortcut icon" />'


def assert_soup_has_favicon(testcase, soup):
    """
@@ -21,7 +21,12 @@ from allmydata.mutable import publish
from .. import common_util as testutil
from ..common import WebErrorMixin, ShouldFailMixin
from ..no_network import GridTestMixin
from .common import unknown_rwcap, unknown_rocap, unknown_immcap, FAVICON_MARKUP
from .common import (
    assert_soup_has_favicon,
    unknown_immcap,
    unknown_rocap,
    unknown_rwcap,
)

DIR_HTML_TAG = '<html lang="en">'

@@ -92,7 +97,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
        def _got_html_good(res):
            self.failUnlessIn("Healthy", res)
            self.failIfIn("Not Healthy", res)
            self.failUnlessIn(FAVICON_MARKUP, res)
            soup = BeautifulSoup(res, 'html5lib')
            assert_soup_has_favicon(self, soup)

        d.addCallback(_got_html_good)
        d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
        def _got_html_good_return_to(res):
@@ -235,7 +242,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
            self.failUnlessIn("Healthy", res)
            self.failIfIn("Not Healthy", res)
            self.failUnlessIn("No repair necessary", res)
            self.failUnlessIn(FAVICON_MARKUP, res)
            soup = BeautifulSoup(res, 'html5lib')
            assert_soup_has_favicon(self, soup)

        d.addCallback(_got_html_good)

        d.addCallback(self.CHECK, "sick", "t=check&repair=true")
@@ -54,6 +54,9 @@ from .common import (
    assert_soup_has_tag_with_attributes,
    assert_soup_has_tag_with_content,
    assert_soup_has_tag_with_attributes_and_content,
    unknown_rwcap,
    unknown_rocap,
    unknown_immcap,
)

from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
@@ -65,7 +68,6 @@ from ..common_web import (
    Error,
)
from allmydata.client import _Client, SecretHolder
from .common import unknown_rwcap, unknown_rocap, unknown_immcap, FAVICON_MARKUP

# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them
@@ -3262,13 +3264,15 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
        res = yield self.get_operation_results(None, "123", "html")
        self.failUnlessIn("Objects Checked: <span>11</span>", res)
        self.failUnlessIn("Objects Healthy: <span>11</span>", res)
        self.failUnlessIn(FAVICON_MARKUP, res)
        soup = BeautifulSoup(res, 'html5lib')
        assert_soup_has_favicon(self, soup)

        res = yield self.GET("/operations/123/")
        # should be the same as without the slash
        self.failUnlessIn("Objects Checked: <span>11</span>", res)
        self.failUnlessIn("Objects Healthy: <span>11</span>", res)
        self.failUnlessIn(FAVICON_MARKUP, res)
        soup = BeautifulSoup(res, 'html5lib')
        assert_soup_has_favicon(self, soup)

        yield self.shouldFail2(error.Error, "one", "404 Not Found",
                               "No detailed results for SI bogus",
@@ -3318,7 +3322,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
            self.failUnlessIn("Objects Unhealthy (after repair): <span>0</span>", res)
            self.failUnlessIn("Corrupt Shares (after repair): <span>0</span>", res)

            self.failUnlessIn(FAVICON_MARKUP, res)
            soup = BeautifulSoup(res, 'html5lib')
            assert_soup_has_favicon(self, soup)
        d.addCallback(_check_html)
        return d

@@ -1,6 +1,15 @@
"""
Track the port to Python 3.

The two easiest ways to run the part of the test suite which is expected to
pass on Python 3 are::

    $ tox -e py36

and::

    $ trial allmydata.test.python3_tests

This module has been ported to Python 3.
"""

@@ -32,7 +41,9 @@ PORTED_MODULES = [
    "allmydata.util.deferredutil",
    "allmydata.util.fileutil",
    "allmydata.util.dictutil",
    "allmydata.util.encodingutil",
    "allmydata.util.gcutil",
    "allmydata.util.happinessutil",
    "allmydata.util.hashutil",
    "allmydata.util.humanreadable",
    "allmydata.util.iputil",
@@ -55,6 +66,7 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_crypto",
    "allmydata.test.test_deferredutil",
    "allmydata.test.test_dictutil",
    "allmydata.test.test_encodingutil",
    "allmydata.test.test_happiness",
    "allmydata.test.test_hashtree",
    "allmydata.test.test_hashutil",
@@ -71,7 +83,3 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_util",
    "allmydata.test.test_version",
]

if __name__ == '__main__':
    from subprocess import check_call
    check_call(["trial"] + PORTED_TEST_MODULES)
@@ -1,9 +1,26 @@
"""
Functions used to convert inputs from whatever encoding used in the system to
unicode and back.

Ported to Python 3.

Once Python 2 support is dropped, most of this module will be obsolete, since
Unicode is the default everywhere in Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2, PY3, native_str
if PY2:
    # We omit str() because that seems too tricky to get right.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min  # noqa: F401

from past.builtins import unicode

import sys, os, re, locale
import unicodedata

from allmydata.util.assertutil import precondition, _assert
from twisted.python import usage
@@ -62,13 +79,14 @@ def _reload():

    check_encoding(io_encoding)

    is_unicode_platform = sys.platform in ["win32", "darwin"]
    is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"]

    # Despite the Unicode-mode FilePath support added to Twisted in
    # <https://twistedmatrix.com/trac/ticket/7805>, we can't yet use
    # Unicode-mode FilePaths with INotify on non-Windows platforms
    # due to <https://twistedmatrix.com/trac/ticket/7928>.
    use_unicode_filepath = sys.platform == "win32"
    # due to <https://twistedmatrix.com/trac/ticket/7928>. Supposedly
    # 7928 is fixed, though...
    use_unicode_filepath = PY3 or sys.platform == "win32"

_reload()

@@ -89,7 +107,10 @@ def argv_to_unicode(s):
    """
    Decode given argv element to unicode. If this fails, raise a UsageError.
    """
    precondition(isinstance(s, str), s)
    if isinstance(s, unicode):
        return s

    precondition(isinstance(s, bytes), s)

    try:
        return unicode(s, io_encoding)
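Illustrative behaviour, assuming io_encoding is 'utf-8' (a restatement of the docstring and code above, not extra API):

    argv_to_unicode(u"lumi\u00e8re")    # already text: returned as-is
    argv_to_unicode(b"lumi\xc3\xa8re")  # bytes: decoded -> u"lumi\u00e8re"
    # input that does not decode raises usage.UsageError (per the docstring)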
@@ -114,18 +135,22 @@ def unicode_to_argv(s, mangle=False):
    If the argument is to be passed to a different process, then the 'mangle' argument
    should be true; on Windows, this uses a mangled encoding that will be reversed by
    code in runner.py.

    On Python 3, just return the string unchanged, since argv is unicode.
    """
    precondition(isinstance(s, unicode), s)
    if PY3:
        return s

    if mangle and sys.platform == "win32":
        # This must be the same as 'mangle' in bin/tahoe-script.template.
        return str(re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s))
        return bytes(re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s), io_encoding)
    else:
        return s.encode(io_encoding)

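To make the Windows 'mangle' branch concrete, the substitution can be replayed on its own (same regex as above; mangle_sketch is a hypothetical name and the value shown is derived by hand):

    import re
    def mangle_sketch(s):
        # every char outside \x20-\x7F becomes DEL + hex codepoint + ';'
        return re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s)
    assert mangle_sketch(u"lumi\u00e8re") == u"lumi\x7fe8;re"  # runner.py reverses this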
def unicode_to_url(s):
    """
    Encode an unicode object used in an URL.
    Encode an unicode object used in an URL to bytes.
    """
    # According to RFC 2718, non-ascii characters in URLs must be UTF-8 encoded.

@@ -134,19 +159,19 @@ def unicode_to_url(s):
    #precondition(isinstance(s, unicode), s)
    #return s.encode('utf-8')

def to_str(s):
    if s is None or isinstance(s, str):
def to_str(s):  # TODO rename to to_bytes
    if s is None or isinstance(s, bytes):
        return s
    return s.encode('utf-8')

def from_utf8_or_none(s):
    precondition(isinstance(s, str) or s is None, s)
    precondition(isinstance(s, bytes) or s is None, s)
    if s is None:
        return s
    return s.decode('utf-8')

PRINTABLE_ASCII = re.compile(r'^[\n\r\x20-\x7E]*$', re.DOTALL)
PRINTABLE_8BIT = re.compile(r'^[\n\r\x20-\x7E\x80-\xFF]*$', re.DOTALL)
PRINTABLE_ASCII = re.compile(br'^[\n\r\x20-\x7E]*$', re.DOTALL)
PRINTABLE_8BIT = re.compile(br'^[\n\r\x20-\x7E\x80-\xFF]*$', re.DOTALL)

def is_printable_ascii(s):
    return PRINTABLE_ASCII.search(s) is not None
@@ -160,14 +185,14 @@ def unicode_to_output(s):
    try:
        out = s.encode(io_encoding)
    except (UnicodeEncodeError, UnicodeDecodeError):
        raise UnicodeEncodeError(io_encoding, s, 0, 0,
                                 "A string could not be encoded as %s for output to the terminal:\n%r" %
                                 (io_encoding, repr(s)))
        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
                                 native_str("A string could not be encoded as %s for output to the terminal:\n%r" %
                                            (io_encoding, repr(s))))

    if PRINTABLE_8BIT.search(out) is None:
        raise UnicodeEncodeError(io_encoding, s, 0, 0,
                                 "A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
                                 (io_encoding, repr(s)))
        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
                                 native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
                                            (io_encoding, repr(s))))
    return out


@@ -188,14 +213,17 @@ def _unicode_escape(m, quote_newlines):
    else:
        return u'\\x%02x' % (codepoint,)

def _str_escape(m, quote_newlines):
def _str_escape(m, quote_newlines):  # TODO rename to _bytes_escape
    """
    Takes a re match on bytes, the result is escaped bytes of group(0).
    """
    c = m.group(0)
    if c == '"' or c == '$' or c == '`' or c == '\\':
        return '\\' + c
    elif c == '\n' and not quote_newlines:
    if c == b'"' or c == b'$' or c == b'`' or c == b'\\':
        return b'\\' + c
    elif c == b'\n' and not quote_newlines:
        return c
    else:
        return '\\x%02x' % (ord(c),)
        return b'\\x%02x' % (ord(c),)

MUST_DOUBLE_QUOTE_NL = re.compile(u'[^\\x20-\\x26\\x28-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]', re.DOTALL)
MUST_DOUBLE_QUOTE = re.compile(u'[^\\n\\x20-\\x26\\x28-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]', re.DOTALL)
@@ -205,7 +233,7 @@ ESCAPABLE_UNICODE = re.compile(u'([\uD800-\uDBFF][\uDC00-\uDFFF])|' # valid sur
                               u'[^ !#\\x25-\\x5B\\x5D-\\x5F\\x61-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]',
                               re.DOTALL)

ESCAPABLE_8BIT = re.compile( r'[^ !#\x25-\x5B\x5D-\x5F\x61-\x7E]', re.DOTALL)
ESCAPABLE_8BIT = re.compile( br'[^ !#\x25-\x5B\x5D-\x5F\x61-\x7E]', re.DOTALL)

def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
    """
@@ -221,32 +249,32 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):

    If not explicitly given, quote_newlines is True when quotemarks is True.
    """
    precondition(isinstance(s, (str, unicode)), s)
    precondition(isinstance(s, (bytes, unicode)), s)
    if quote_newlines is None:
        quote_newlines = quotemarks

    if isinstance(s, str):
    if isinstance(s, bytes):
        try:
            s = s.decode('utf-8')
        except UnicodeDecodeError:
            return 'b"%s"' % (ESCAPABLE_8BIT.sub(lambda m: _str_escape(m, quote_newlines), s),)
            return b'b"%s"' % (ESCAPABLE_8BIT.sub(lambda m: _str_escape(m, quote_newlines), s),)

    must_double_quote = quote_newlines and MUST_DOUBLE_QUOTE_NL or MUST_DOUBLE_QUOTE
    if must_double_quote.search(s) is None:
        try:
            out = s.encode(encoding or io_encoding)
            if quotemarks or out.startswith('"'):
                return "'%s'" % (out,)
            if quotemarks or out.startswith(b'"'):
                return b"'%s'" % (out,)
            else:
                return out
        except (UnicodeDecodeError, UnicodeEncodeError):
            pass

    escaped = ESCAPABLE_UNICODE.sub(lambda m: _unicode_escape(m, quote_newlines), s)
    return '"%s"' % (escaped.encode(encoding or io_encoding, 'backslashreplace'),)
    return b'"%s"' % (escaped.encode(encoding or io_encoding, 'backslashreplace'),)

def quote_path(path, quotemarks=True):
    return quote_output("/".join(map(to_str, path)), quotemarks=quotemarks, quote_newlines=True)
    return quote_output(b"/".join(map(to_str, path)), quotemarks=quotemarks, quote_newlines=True)
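The escaping ladder above at a glance (expected values mirror the quote_output unit tests earlier in this diff; a sketch, not a spec):

    from allmydata.util.encodingutil import quote_output
    assert quote_output(u"foo") == b"'foo'"      # encodable text: single quotes
    assert quote_output(u"\n") == b'"\\x0a"'     # control chars: double quotes + \xNN
    assert quote_output(b"\xff") == b'b"\\xff"'  # undecodable bytes: b"..." form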
def quote_local_unicode_path(path, quotemarks=True):
    precondition(isinstance(path, unicode), path)
@@ -275,7 +303,7 @@ def extend_filepath(fp, segments):
    return fp

def to_filepath(path):
    precondition(isinstance(path, unicode if use_unicode_filepath else basestring),
    precondition(isinstance(path, unicode if use_unicode_filepath else (bytes, unicode)),
                 path=path)

    if isinstance(path, unicode) and not use_unicode_filepath:
@@ -290,7 +318,7 @@ def to_filepath(path):
    return FilePath(path)

def _decode(s):
    precondition(isinstance(s, basestring), s=s)
    precondition(isinstance(s, (bytes, unicode)), s=s)

    if isinstance(s, bytes):
        return s.decode(filesystem_encoding)
@@ -356,3 +384,9 @@ def listdir_unicode(path):

def listdir_filepath(fp):
    return listdir_unicode(unicode_from_filepath(fp))


# 'x' at the end of a variable name indicates that it holds a Unicode string that may not
# be NFC-normalized.
def normalize(namex):
    return unicodedata.normalize('NFC', namex)
@@ -1,7 +1,18 @@
"""
I contain utilities useful for calculating servers_of_happiness, and for
reporting it in messages
reporting it in messages.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    # We omit dict, just in case newdict breaks things.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min  # noqa: F401

from copy import deepcopy
from allmydata.immutable.happiness_upload import residual_network
@@ -51,7 +62,7 @@ def shares_by_server(servermap):
    dictionary of sets of shares, indexed by peerids.
    """
    ret = {}
    for shareid, peers in servermap.iteritems():
    for shareid, peers in servermap.items():
        assert isinstance(peers, set)
        for peerid in peers:
            ret.setdefault(peerid, set()).add(shareid)
@@ -146,7 +157,7 @@ def servers_of_happiness(sharemap):
    # The implementation here is an adaptation of an algorithm described in
    # "Introduction to Algorithms", Cormen et al, 2nd ed., pp 658-662.
    dim = len(graph)
    flow_function = [[0 for sh in xrange(dim)] for s in xrange(dim)]
    flow_function = [[0 for sh in range(dim)] for s in range(dim)]
    residual_graph, residual_function = residual_network(graph, flow_function)
    while augmenting_path_for(residual_graph):
        path = augmenting_path_for(residual_graph)
@@ -169,7 +180,7 @@ def servers_of_happiness(sharemap):
    # our graph, so we can stop after summing flow across those. The
    # value of a flow computed in this way is the size of a maximum
    # matching on the bipartite graph described above.
    return sum([flow_function[0][v] for v in xrange(1, num_servers+1)])
    return sum([flow_function[0][v] for v in range(1, num_servers+1)])

def _flow_network_for(servermap):
    """
@@ -198,14 +209,14 @@ def _flow_network_for(servermap):
    graph = [] # index -> [index], an adjacency list
    # Add an entry at the top (index 0) that has an edge to every server
    # in servermap
    graph.append(servermap.keys())
    graph.append(list(servermap.keys()))
    # For each server, add an entry that has an edge to every share that it
    # contains (or will contain).
    for k in servermap:
        graph.append(servermap[k])
    # For each share, add an entry that has an edge to the sink.
    sink_num = num_servers + num_shares + 1
    for i in xrange(num_shares):
    for i in range(num_shares):
        graph.append([sink_num])
    # Add an empty entry for the sink, which has no outbound edges.
    graph.append([])
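For orientation, the adjacency list this produces for one server holding one share (indices as described above: 0 = source, 1 = the server, 2 = the share, 3 = sink; an illustrative value written out by hand, not computed from the real function):

    graph = [
        [1],  # source -> server
        [2],  # server -> its share
        [3],  # share -> sink
        [],   # sink: no outbound edges
    ]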
@@ -231,8 +242,8 @@ def _reindex(servermap, base_index):
    # Number the shares
    for k in ret:
        for shnum in ret[k]:
            if not shares.has_key(shnum):
            if shnum not in shares:
                shares[shnum] = num
                num += 1
        ret[k] = map(lambda x: shares[x], ret[k])
        ret[k] = [shares[x] for x in ret[k]]
    return (ret, len(shares))
@ -1,4 +1,4 @@
|
||||
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
|
||||
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
|
||||
<head>
|
||||
<title>Tahoe-LAFS - Check Results</title>
|
||||
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
|
||||
@ -7,17 +7,17 @@
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<h1>File Check-And-Repair Results for SI=<span n:render="storage_index" /></h1>
|
||||
<h1>File Check-And-Repair Results for SI=<span t:render="storage_index" /></h1>
|
||||
|
||||
<div n:render="summary" />
|
||||
<div t:render="summary" />
|
||||
|
||||
<div n:render="repair_results" />
|
||||
<div t:render="repair_results" />
|
||||
|
||||
<div n:render="post_repair_results" />
|
||||
<div t:render="post_repair_results" />
|
||||
|
||||
<div n:render="maybe_pre_repair_results" />
|
||||
<div t:render="maybe_pre_repair_results" />
|
||||
|
||||
<div n:render="return" />
|
||||
<div t:render="return_to" />
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
  <head>
    <title>Tahoe-LAFS - Check Results</title>
    <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -7,17 +7,17 @@
  </head>
  <body>

<h1>File Check Results for SI=<span n:render="storage_index" /></h1>
<h1>File Check Results for SI=<span t:render="storage_index" /></h1>

<div>
  <span n:render="summary" />
  <span t:render="summary" />
</div>

<div n:render="repair" />
<div t:render="repair" />

<div n:render="results" />
<div t:render="results" />

<div n:render="return" />
<div t:render="return_to" />

  </body>
</html>
@ -1,12 +1,35 @@

import time
import json

from nevow import rend, inevow, tags as T
from twisted.web import http, html
from allmydata.web.common import getxmlfile, get_arg, get_root, WebError

from twisted.web import (
    http,
    html,
)
from twisted.python.filepath import FilePath
from twisted.web.template import (
    Element,
    XMLFile,
    renderer,
    renderElement,
    tags,
)
from allmydata.web.common import (
    get_arg,
    get_root,
    WebError,
    MultiFormatResource,
    SlotsSequenceElement,
)
from allmydata.web.operations import ReloadMixin
from allmydata.interfaces import ICheckAndRepairResults, ICheckResults
from allmydata.util import base32, dictutil
from allmydata.interfaces import (
    ICheckAndRepairResults,
    ICheckResults,
)
from allmydata.util import (
    base32,
    dictutil,
)
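These imports replace Nevow's rend.Page machinery with twisted.web.template, whose recurring shape throughout the rest of the patch is an Element with a template loader plus @renderer methods. A minimal standalone sketch of that pattern (using XMLString rather than the patch's XMLFile, so it runs without the sibling .xhtml files):

    from twisted.web.template import Element, XMLString, renderer, flattenString

    class SummaryElement(Element):
        # The patch loads sibling .xhtml files via XMLFile(FilePath(...));
        # XMLString keeps this sketch self-contained.
        loader = XMLString(
            '<div xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
            '<span t:render="summary" /></div>')

        @renderer
        def summary(self, request, tag):
            # Calling the tag adds children, just as tags.span("Healthy") would.
            return tag("Healthy")

    flattenString(None, SummaryElement()).addCallback(print)
    # prints: b'<div><span>Healthy</span></div>'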


def json_check_counts(r):
@ -64,53 +87,64 @@ def json_check_and_repair_results(r):
    return data

class ResultsBase(object):
    # self.client must point to the Client, so we can get nicknames and
    # self._client must point to the Client, so we can get nicknames and
    # determine the permuted peer order

    def _join_pathstring(self, path):
        """
        :param tuple path: a path represented by a tuple, such as
            ``(u'some', u'dir', u'file')``.

        :return: a string joined by path separators, such as
            ``u'some/dir/file'``.
        """
        if path:
            pathstring = "/".join(self._html(path))
        else:
            pathstring = "<root>"
        return pathstring

    def _render_results(self, ctx, cr):
    def _render_results(self, req, cr):
        assert ICheckResults(cr)
        c = self.client
        c = self._client
        sb = c.get_storage_broker()
        r = []
        def add(name, value):
            r.append(T.li[name + ": ", value])
            r.append(tags.li(name + ": ", value))

        add("Report", tags.pre("\n".join(self._html(cr.get_report()))))

        add("Report", T.pre["\n".join(self._html(cr.get_report()))])
        add("Share Counts",
            "need %d-of-%d, have %d" % (cr.get_encoding_needed(),
                                        cr.get_encoding_expected(),
                                        cr.get_share_counter_good()))
        add("Happiness Level", cr.get_happiness())
        add("Hosts with good shares", cr.get_host_counter_good_shares())
        add("Happiness Level", str(cr.get_happiness()))
        add("Hosts with good shares", str(cr.get_host_counter_good_shares()))

        if cr.get_corrupt_shares():
            badsharemap = []
            for (s, si, shnum) in cr.get_corrupt_shares():
                d = T.tr[T.td["sh#%d" % shnum],
                         T.td[T.div(class_="nickname")[s.get_nickname()],
                              T.div(class_="nodeid")[T.tt[s.get_name()]]],
                         ]
                d = tags.tr(tags.td("sh#%d" % shnum),
                            tags.td(tags.div(s.get_nickname(), class_="nickname"),
                                    tags.div(tags.tt(s.get_name()), class_="nodeid")),)
                badsharemap.append(d)
            add("Corrupt shares", T.table()[
                T.tr[T.th["Share ID"],
                     T.th(class_="nickname-and-peerid")[T.div["Nickname"], T.div(class_="nodeid")["Node ID"]]],
                badsharemap])
            add("Corrupt shares",
                tags.table(
                    tags.tr(tags.th("Share ID"),
                            tags.th((tags.div("Nickname"), tags.div("Node ID", class_="nodeid")), class_="nickname-and-peerid")),
                    badsharemap))
        else:
            add("Corrupt shares", "none")

        add("Wrong Shares", cr.get_share_counter_wrong())
        add("Wrong Shares", str(cr.get_share_counter_wrong()))

        sharemap_data = []
        shares_on_server = dictutil.DictOfSets()

        # FIXME: The two tables below contain nickname-and-nodeid table column markup which is duplicated with each other, introducer.xhtml, and deep-check-results.xhtml. All of these (and any other presentations of nickname-and-nodeid) should be combined.
        # FIXME: The two tables below contain nickname-and-nodeid
        # table column markup which is duplicated with each other,
        # introducer.xhtml, and deep-check-results.xhtml. All of these
        # (and any other presentations of nickname-and-nodeid) should be combined.

        for shareid in sorted(cr.get_sharemap().keys()):
            servers = sorted(cr.get_sharemap()[shareid],
@ -119,19 +153,20 @@ class ResultsBase(object):
                shares_on_server.add(s, shareid)
                shareid_s = ""
                if i == 0:
                    shareid_s = shareid
            d = T.tr[T.td[shareid_s],
                     T.td[T.div(class_="nickname")[s.get_nickname()],
                          T.div(class_="nodeid")[T.tt[s.get_name()]]]
                     ]
                    shareid_s = str(shareid)
            d = tags.tr(tags.td(shareid_s),
                        tags.td(tags.div(s.get_nickname(), class_="nickname"),
                                tags.div(tags.tt(s.get_name()), class_="nodeid")))
            sharemap_data.append(d)

        add("Good Shares (sorted in share order)",
            T.table()[T.tr[T.th["Share ID"], T.th(class_="nickname-and-peerid")[T.div["Nickname"], T.div(class_="nodeid")["Node ID"]]],
                      sharemap_data])
            tags.table(tags.tr(tags.th("Share ID"),
                               tags.th(tags.div("Nickname"),
                                       tags.div("Node ID", class_="nodeid"), class_="nickname-and-peerid")),
                       sharemap_data))


        add("Recoverable Versions", cr.get_version_counter_recoverable())
        add("Unrecoverable Versions", cr.get_version_counter_unrecoverable())
        add("Recoverable Versions", str(cr.get_version_counter_recoverable()))
        add("Unrecoverable Versions", str(cr.get_version_counter_unrecoverable()))

        # this table is sorted by permuted order
        permuted_servers = [s
@ -144,20 +179,23 @@ class ResultsBase(object):
        for s in permuted_servers:
            shareids = list(shares_on_server.get(s, []))
            shareids.reverse()
            shareids_s = [ T.tt[shareid, " "] for shareid in sorted(shareids) ]
            d = T.tr[T.td[T.div(class_="nickname")[s.get_nickname()],
                          T.div(class_="nodeid")[T.tt[s.get_name()]]],
                     T.td[shareids_s],
                     ]
            shareids_s = [tags.tt(str(shareid), " ") for shareid in sorted(shareids)]

            d = tags.tr(tags.td(tags.div(s.get_nickname(), class_="nickname"),
                                tags.div(tags.tt(s.get_name()), class_="nodeid")),
                        tags.td(shareids_s), )
            servermap.append(d)
            num_shares_left -= len(shareids)
            if not num_shares_left:
                break
        add("Share Balancing (servers in permuted order)",
            T.table()[T.tr[T.th(class_="nickname-and-peerid")[T.div["Nickname"], T.div(class_="nodeid")["Node ID"]], T.th["Share IDs"]],
                      servermap])

        return T.ul[r]
        add("Share Balancing (servers in permuted order)",
            tags.table(tags.tr(tags.th(tags.div("Nickname"),
                                       tags.div("Node ID", class_="nodeid"), class_="nickname-and-peerid"),
                               tags.th("Share IDs")),
                       servermap))

        return tags.ul(r)
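The recurring translation in these hunks: Nevow stan expressions like T.li[children] become twisted.web.template calls like tags.li(children), with children as positional arguments and attributes as keywords (class_="nickname"). A runnable comparison with invented content:

    from twisted.web.template import tags, flattenString

    # Nevow:  T.li["Report", ": ", T.pre["..."]]
    # Port:   tags.li("Report", ": ", tags.pre("..."))
    item = tags.li("Report", ": ", tags.pre("line one\nline two"))
    flattenString(None, tags.ul(item)).addCallback(print)
    # prints roughly: b'<ul><li>Report: <pre>line one\nline two</pre></li></ul>'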

    def _html(self, s):
        if isinstance(s, (str, unicode)):
@ -165,91 +203,114 @@ class ResultsBase(object):
        assert isinstance(s, (list, tuple))
        return [html.escape(w) for w in s]

    def want_json(self, ctx):
        output = get_arg(inevow.IRequest(ctx), "output", "").lower()
        if output.lower() == "json":
            return True
        return False

    def _render_si_link(self, ctx, storage_index):
    def _render_si_link(self, req, storage_index):
        si_s = base32.b2a(storage_index)
        req = inevow.IRequest(ctx)
        ophandle = req.prepath[-1]
        target = "%s/operations/%s/%s" % (get_root(ctx), ophandle, si_s)
        output = get_arg(ctx, "output")
        target = "%s/operations/%s/%s" % (get_root(req), ophandle, si_s)
        output = get_arg(req, "output")
        if output:
            target = target + "?output=%s" % output
        return T.a(href=target)[si_s]
        return tags.a(si_s, href=target)

class LiteralCheckResultsRenderer(rend.Page, ResultsBase):
    docFactory = getxmlfile("literal-check-results.xhtml")

class LiteralCheckResultsRenderer(MultiFormatResource, ResultsBase):

    formatArgument = "output"

    def __init__(self, client):
        self.client = client
        rend.Page.__init__(self, client)
        """
        :param allmydata.interfaces.IStatsProducer client: stats provider.
        """
        super(LiteralCheckResultsRenderer, self).__init__()
        self._client = client

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)
    def render_HTML(self, req):
        return renderElement(req, LiteralCheckResultsRendererElement())

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        data = json_check_results(None)
        return json.dumps(data, indent=1) + "\n"

    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)

class LiteralCheckResultsRendererElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("literal-check-results.xhtml"))

    def __init__(self):
        super(LiteralCheckResultsRendererElement, self).__init__()

    @renderer
    def return_to(self, req, tag):
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to file."]]
            return tags.div(tags.a("Return to file.", href=return_to))
        return ""


class CheckerBase(object):

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)
    @renderer
    def storage_index(self, req, tag):
        return self._results.get_storage_index_string()

    def render_storage_index(self, ctx, data):
        return self.r.get_storage_index_string()

    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)
    @renderer
    def return_to(self, req, tag):
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to file/directory."]]
            return tags.div(tags.a("Return to file/directory.", href=return_to))
        return ""

class CheckResultsRenderer(CheckerBase, rend.Page, ResultsBase):
    docFactory = getxmlfile("check-results.xhtml")

class CheckResultsRenderer(MultiFormatResource):

    formatArgument = "output"

    def __init__(self, client, results):
        self.client = client
        self.r = ICheckResults(results)
        rend.Page.__init__(self, results)
        """
        :param allmydata.interfaces.IStatsProducer client: stats provider.
        :param allmydata.interfaces.ICheckResults results: results of check/verify operation.
        """
        super(CheckResultsRenderer, self).__init__()
        self._client = client
        self._results = ICheckResults(results)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        data = json_check_results(self.r)
    def render_HTML(self, req):
        return renderElement(req, CheckResultsRendererElement(self._client, self._results))

    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        data = json_check_results(self._results)
        return json.dumps(data, indent=1) + "\n"

    def render_summary(self, ctx, data):

class CheckResultsRendererElement(Element, CheckerBase, ResultsBase):

    loader = XMLFile(FilePath(__file__).sibling("check-results.xhtml"))

    def __init__(self, client, results):
        super(CheckResultsRendererElement, self).__init__()
        self._client = client
        self._results = results

    @renderer
    def summary(self, req, tag):
        results = []
        if data.is_healthy():
        if self._results.is_healthy():
            results.append("Healthy")
        elif data.is_recoverable():
        elif self._results.is_recoverable():
            results.append("Not Healthy!")
        else:
            results.append("Not Recoverable!")
        results.append(" : ")
        results.append(self._html(data.get_summary()))
        return ctx.tag[results]
        results.append(self._html(self._results.get_summary()))
        return tag(results)

    def render_repair(self, ctx, data):
        if data.is_healthy():
    @renderer
    def repair(self, req, tag):
        if self._results.is_healthy():
            return ""

        #repair = T.form(action=".", method="post",
        # enctype="multipart/form-data")[
        # T.fieldset[
@ -258,30 +319,52 @@ class CheckResultsRenderer(CheckerBase, rend.Page, ResultsBase):
        # T.input(type="submit", value="Repair"),
        # ]]
        #return ctx.tag[repair]

        return "" # repair button disabled until we make it work correctly,
                  # see #622 for details

    def render_results(self, ctx, data):
        cr = self._render_results(ctx, data)
        return ctx.tag[cr]
    @renderer
    def results(self, req, tag):
        cr = self._render_results(req, self._results)
        return tag(cr)

class CheckAndRepairResultsRenderer(CheckerBase, rend.Page, ResultsBase):
    docFactory = getxmlfile("check-and-repair-results.xhtml")
class CheckAndRepairResultsRenderer(MultiFormatResource):

    formatArgument = "output"

    def __init__(self, client, results):
        self.client = client
        self.r = None
        """
        :param allmydata.interfaces.IStatsProducer client: stats provider.
        :param allmydata.interfaces.ICheckResults results: check/verify results.
        """
        super(CheckAndRepairResultsRenderer, self).__init__()
        self._client = client
        self._results = None
        if results:
            self.r = ICheckAndRepairResults(results)
            rend.Page.__init__(self, results)
            self._results = ICheckAndRepairResults(results)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        data = json_check_and_repair_results(self.r)
    def render_HTML(self, req):
        elem = CheckAndRepairResultsRendererElement(self._client, self._results)
        return renderElement(req, elem)

    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        data = json_check_and_repair_results(self._results)
        return json.dumps(data, indent=1) + "\n"

    def render_summary(self, ctx, data):
        cr = data.get_post_repair_results()

class CheckAndRepairResultsRendererElement(Element, CheckerBase, ResultsBase):

    loader = XMLFile(FilePath(__file__).sibling("check-and-repair-results.xhtml"))

    def __init__(self, client, results):
        super(CheckAndRepairResultsRendererElement, self).__init__()
        self._client = client
        self._results = results

    @renderer
    def summary(self, req, tag):
        cr = self._results.get_post_repair_results()
        results = []
        if cr.is_healthy():
            results.append("Healthy")
@ -291,35 +374,44 @@ class CheckAndRepairResultsRenderer(CheckerBase, rend.Page, ResultsBase):
            results.append("Not Recoverable!")
        results.append(" : ")
        results.append(self._html(cr.get_summary()))
        return ctx.tag[results]
        return tag(results)

    def render_repair_results(self, ctx, data):
        if data.get_repair_attempted():
            if data.get_repair_successful():
                return ctx.tag["Repair successful"]
    @renderer
    def repair_results(self, req, tag):
        if self._results.get_repair_attempted():
            if self._results.get_repair_successful():
                return tag("Repair successful")
            else:
                return ctx.tag["Repair unsuccessful"]
        return ctx.tag["No repair necessary"]
                return tag("Repair unsuccessful")
        return tag("No repair necessary")

    def render_post_repair_results(self, ctx, data):
        cr = self._render_results(ctx, data.get_post_repair_results())
        return ctx.tag[T.div["Post-Repair Checker Results:"], cr]
    @renderer
    def post_repair_results(self, req, tag):
        cr = self._render_results(req, self._results.get_post_repair_results())
        return tag(tags.div("Post-Repair Checker Results:"), cr)

    def render_maybe_pre_repair_results(self, ctx, data):
        if data.get_repair_attempted():
            cr = self._render_results(ctx, data.get_pre_repair_results())
            return ctx.tag[T.div["Pre-Repair Checker Results:"], cr]
    @renderer
    def maybe_pre_repair_results(self, req, tag):
        if self._results.get_repair_attempted():
            cr = self._render_results(req, self._results.get_pre_repair_results())
            return tag(tags.div("Pre-Repair Checker Results:"), cr)
        return ""


class DeepCheckResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
    docFactory = getxmlfile("deep-check-results.xhtml")
class DeepCheckResultsRenderer(MultiFormatResource):

    formatArgument = "output"

    def __init__(self, client, monitor):
        self.client = client
        """
        :param allmydata.interfaces.IStatsProducer client: stats provider.
        :param allmydata.monitor.IMonitor monitor: status, progress, and cancellation provider.
        """
        super(DeepCheckResultsRenderer, self).__init__()
        self._client = client
        self.monitor = monitor

    def childFactory(self, ctx, name):
    def getChild(self, name, req):
        if not name:
            return self
        # /operation/$OPHANDLE/$STORAGEINDEX provides detailed information
@ -327,19 +419,18 @@ class DeepCheckResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
        si = base32.a2b(name)
        r = self.monitor.get_status()
        try:
            return CheckResultsRenderer(self.client,
            return CheckResultsRenderer(self._client,
                                        r.get_results_for_storage_index(si))
        except KeyError:
            raise WebError("No detailed results for SI %s" % html.escape(name),
                           http.NOT_FOUND)

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)
    def render_HTML(self, req):
        elem = DeepCheckResultsRendererElement(self.monitor)
        return renderElement(req, elem)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        data = {}
        data["finished"] = self.monitor.is_finished()
        res = self.monitor.get_status()
@ -361,116 +452,170 @@ class DeepCheckResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
        data["stats"] = res.get_stats()
        return json.dumps(data, indent=1) + "\n"

    def render_root_storage_index(self, ctx, data):

class DeepCheckResultsRendererElement(Element, ResultsBase, ReloadMixin):

    loader = XMLFile(FilePath(__file__).sibling("deep-check-results.xhtml"))

    def __init__(self, monitor):
        super(DeepCheckResultsRendererElement, self).__init__()
        self.monitor = monitor

    @renderer
    def root_storage_index(self, req, tag):
        if not self.monitor.get_status():
            return ""
        return self.monitor.get_status().get_root_storage_index_string()

    def data_objects_checked(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-checked"]
    def data_objects_healthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-healthy"]
    def data_objects_unhealthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unhealthy"]
    def data_objects_unrecoverable(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unrecoverable"]
    def _get_monitor_counter(self, name):
        if not self.monitor.get_status():
            return ""
        return str(self.monitor.get_status().get_counters().get(name))

    def data_count_corrupt_shares(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-corrupt-shares"]
    @renderer
    def objects_checked(self, req, tag):
        return self._get_monitor_counter("count-objects-checked")

    def render_problems_p(self, ctx, data):
        c = self.monitor.get_status().get_counters()
        if c["count-objects-unhealthy"]:
            return ctx.tag
    @renderer
    def objects_healthy(self, req, tag):
        return self._get_monitor_counter("count-objects-healthy")

    @renderer
    def objects_unhealthy(self, req, tag):
        return self._get_monitor_counter("count-objects-unhealthy")

    @renderer
    def objects_unrecoverable(self, req, tag):
        return self._get_monitor_counter("count-objects-unrecoverable")

    @renderer
    def count_corrupt_shares(self, req, tag):
        return self._get_monitor_counter("count-corrupt-shares")

    @renderer
    def problems_p(self, req, tag):
        if self._get_monitor_counter("count-objects-unhealthy"):
            return tag
        return ""

    def data_problems(self, ctx, data):
    @renderer
    def problems(self, req, tag):
        all_objects = self.monitor.get_status().get_all_results()
        problems = []

        for path in sorted(all_objects.keys()):
            cr = all_objects[path]
            assert ICheckResults.providedBy(cr)
            if not cr.is_healthy():
                yield path, cr
                summary_text = ""
                summary = cr.get_summary()
                if summary:
                    summary_text = ": " + summary
                summary_text += " [SI: %s]" % cr.get_storage_index_string()
                problems.append({
                    # Not sure self._join_pathstring(path) is the
                    # right thing to use here.
                    "problem": self._join_pathstring(path) + self._html(summary_text),
                })

    def render_problem(self, ctx, data):
        path, cr = data
        summary_text = ""
        summary = cr.get_summary()
        if summary:
            summary_text = ": " + summary
        summary_text += " [SI: %s]" % cr.get_storage_index_string()
        return ctx.tag[self._join_pathstring(path), self._html(summary_text)]
        return SlotsSequenceElement(tag, problems)


    def render_servers_with_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares"]:
            return ctx.tag
    @renderer
    def servers_with_corrupt_shares_p(self, req, tag):
        if self._get_monitor_counter("count-corrupt-shares"):
            return tag
        return ""

    def data_servers_with_corrupt_shares(self, ctx, data):
    @renderer
    def servers_with_corrupt_shares(self, req, tag):
        servers = [s
                   for (s, storage_index, sharenum)
                   in self.monitor.get_status().get_corrupt_shares()]
        servers.sort(key=lambda s: s.get_longname())
        return servers

    def render_server_problem(self, ctx, server):
        data = [server.get_name()]
        nickname = server.get_nickname()
        if nickname:
            data.append(" (%s)" % self._html(nickname))
        return ctx.tag[data]
        problems = []

        for server in servers:
            name = [server.get_name()]
            nickname = server.get_nickname()
            if nickname:
                name.append(" (%s)" % self._html(nickname))
            problems.append({"problem": name})

    def render_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares"]:
            return ctx.tag
        return SlotsSequenceElement(tag, problems)

    @renderer
    def corrupt_shares_p(self, req, tag):
        if self._get_monitor_counter("count-corrupt-shares"):
            return tag
        return ""
    def data_corrupt_shares(self, ctx, data):
        return self.monitor.get_status().get_corrupt_shares()
    def render_share_problem(self, ctx, data):
        server, storage_index, sharenum = data
        nickname = server.get_nickname()
        ctx.fillSlots("serverid", server.get_name())
        if nickname:
            ctx.fillSlots("nickname", self._html(nickname))
        ctx.fillSlots("si", self._render_si_link(ctx, storage_index))
        ctx.fillSlots("shnum", str(sharenum))
        return ctx.tag

    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)
    @renderer
    def corrupt_shares(self, req, tag):
        shares = self.monitor.get_status().get_corrupt_shares()
        problems = []

        for share in shares:
            server, storage_index, sharenum = share
            nickname = server.get_nickname()
            problem = {
                "serverid": server.get_name(),
                "nickname": self._html(nickname),
                "si": self._render_si_link(req, storage_index),
                "shnum": str(sharenum),
            }
            problems.append(problem)

        return SlotsSequenceElement(tag, problems)
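Renderers like corrupt_shares above hand SlotsSequenceElement (from allmydata.web.common) a list of dicts whose keys match the t:slot names in the template's item row. The underlying twisted.web.template mechanism is Tag.fillSlots; a runnable sketch with invented values:

    from twisted.web.template import tags, slot, flattenString

    # One table row with named slots, filled from dict-shaped data -- the
    # same shape as the SlotsSequenceElement rows built above.
    row = tags.tr(tags.td(slot("serverid")), tags.td(slot("shnum")))
    row.fillSlots(serverid="v0cmwe3t", shnum="0")
    flattenString(None, row).addCallback(print)
    # prints roughly: b'<tr><td>v0cmwe3t</td><td>0</td></tr>'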

    @renderer
    def return_to(self, req, tag):
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to file/directory."]]
            return tags.div(tags.a("Return to file/directory.", href=return_to))
        return ""

    def data_all_objects(self, ctx, data):
        r = self.monitor.get_status().get_all_results()
        for path in sorted(r.keys()):
            yield (path, r[path])
    @renderer
    def all_objects(self, req, tag):
        results = self.monitor.get_status().get_all_results()
        objects = []

    def render_object(self, ctx, data):
        path, r = data
        ctx.fillSlots("path", self._join_pathstring(path))
        ctx.fillSlots("healthy", str(r.is_healthy()))
        ctx.fillSlots("recoverable", str(r.is_recoverable()))
        storage_index = r.get_storage_index()
        ctx.fillSlots("storage_index", self._render_si_link(ctx, storage_index))
        ctx.fillSlots("summary", self._html(r.get_summary()))
        return ctx.tag
        for path in sorted(results.keys()):
            result = results.get(path)
            storage_index = result.get_storage_index()
            object = {
                "path": self._join_pathstring(path),
                "healthy": str(result.is_healthy()),
                "recoverable": str(result.is_recoverable()),
                "storage_index": self._render_si_link(req, storage_index),
                "summary": self._html(result.get_summary()),
            }
            objects.append(object)

    def render_runtime(self, ctx, data):
        req = inevow.IRequest(ctx)
        runtime = time.time() - req.processing_started_timestamp
        return ctx.tag["runtime: %s seconds" % runtime]
        return SlotsSequenceElement(tag, objects)

class DeepCheckAndRepairResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
    docFactory = getxmlfile("deep-check-and-repair-results.xhtml")
    @renderer
    def runtime(self, req, tag):
        runtime = 'unknown'
        if hasattr(req, 'processing_started_timestamp'):
            runtime = time.time() - req.processing_started_timestamp
        return tag("runtime: %s seconds" % runtime)


class DeepCheckAndRepairResultsRenderer(MultiFormatResource):

    formatArgument = "output"

    def __init__(self, client, monitor):
        self.client = client
        """
        :param allmydata.interfaces.IStatsProducer client: stats provider.
        :param allmydata.monitor.IMonitor monitor: status, progress, and cancellation provider.
        """
        super(DeepCheckAndRepairResultsRenderer, self).__init__()
        self._client = client
        self.monitor = monitor

    def childFactory(self, ctx, name):
    def getChild(self, name, req):
        if not name:
            return self
        # /operation/$OPHANDLE/$STORAGEINDEX provides detailed information
@ -479,18 +624,17 @@ class DeepCheckAndRepairResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
        s = self.monitor.get_status()
        try:
            results = s.get_results_for_storage_index(si)
            return CheckAndRepairResultsRenderer(self.client, results)
            return CheckAndRepairResultsRenderer(self._client, results)
        except KeyError:
            raise WebError("No detailed results for SI %s" % html.escape(name),
                           http.NOT_FOUND)

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)
    def render_HTML(self, req):
        elem = DeepCheckAndRepairResultsRendererElement(self.monitor)
        return renderElement(req, elem)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        res = self.monitor.get_status()
        data = {}
        data["finished"] = self.monitor.is_finished()
@ -531,119 +675,132 @@ class DeepCheckAndRepairResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
        data["stats"] = res.get_stats()
        return json.dumps(data, indent=1) + "\n"

    def render_root_storage_index(self, ctx, data):
        return self.monitor.get_status().get_root_storage_index_string()

    def data_objects_checked(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-checked"]
class DeepCheckAndRepairResultsRendererElement(DeepCheckResultsRendererElement):
    """
    The page generated here has several elements common to the "deep check
    results" page; hence the code reuse.
    """

    def data_objects_healthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-healthy-pre-repair"]
    def data_objects_unhealthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unhealthy-pre-repair"]
    def data_corrupt_shares(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-corrupt-shares-pre-repair"]
    loader = XMLFile(FilePath(__file__).sibling("deep-check-and-repair-results.xhtml"))

    def data_repairs_attempted(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-repairs-attempted"]
    def data_repairs_successful(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-repairs-successful"]
    def data_repairs_unsuccessful(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-repairs-unsuccessful"]
    def __init__(self, monitor):
        super(DeepCheckAndRepairResultsRendererElement, self).__init__(monitor)
        self.monitor = monitor

    def data_objects_healthy_post(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-healthy-post-repair"]
    def data_objects_unhealthy_post(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unhealthy-post-repair"]
    def data_corrupt_shares_post(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-corrupt-shares-post-repair"]
    @renderer
    def objects_healthy(self, req, tag):
        return self._get_monitor_counter("count-objects-healthy-pre-repair")

    def render_pre_repair_problems_p(self, ctx, data):
        c = self.monitor.get_status().get_counters()
        if c["count-objects-unhealthy-pre-repair"]:
            return ctx.tag
    @renderer
    def objects_unhealthy(self, req, tag):
        return self._get_monitor_counter("count-objects-unhealthy-pre-repair")

    @renderer
    def corrupt_shares(self, req, tag):
        return self._get_monitor_counter("count-corrupt-shares-pre-repair")

    @renderer
    def repairs_attempted(self, req, tag):
        return self._get_monitor_counter("count-repairs-attempted")

    @renderer
    def repairs_successful(self, req, tag):
        return self._get_monitor_counter("count-repairs-successful")

    @renderer
    def repairs_unsuccessful(self, req, tag):
        return self._get_monitor_counter("count-repairs-unsuccessful")

    @renderer
    def objects_healthy_post(self, req, tag):
        return self._get_monitor_counter("count-objects-healthy-post-repair")

    @renderer
    def objects_unhealthy_post(self, req, tag):
        return self._get_monitor_counter("count-objects-unhealthy-post-repair")

    @renderer
    def corrupt_shares_post(self, req, tag):
        return self._get_monitor_counter("count-corrupt-shares-post-repair")

    @renderer
    def pre_repair_problems_p(self, req, tag):
        if self._get_monitor_counter("count-objects-unhealthy-pre-repair"):
            return tag
        return ""

    def data_pre_repair_problems(self, ctx, data):
    @renderer
    def pre_repair_problems(self, req, tag):
        all_objects = self.monitor.get_status().get_all_results()
        problems = []

        for path in sorted(all_objects.keys()):
            r = all_objects[path]
            assert ICheckAndRepairResults.providedBy(r)
            cr = r.get_pre_repair_results()
            if not cr.is_healthy():
                yield path, cr
                problem = self._join_pathstring(path), ": ", self._html(cr.get_summary())
                problems.append({"problem": problem})

    def render_problem(self, ctx, data):
        path, cr = data
        return ctx.tag[self._join_pathstring(path), ": ",
                       self._html(cr.get_summary())]
        return SlotsSequenceElement(tag, problems)

    def render_post_repair_problems_p(self, ctx, data):
        c = self.monitor.get_status().get_counters()
        if (c["count-objects-unhealthy-post-repair"]
            or c["count-corrupt-shares-post-repair"]):
            return ctx.tag
    @renderer
    def post_repair_problems_p(self, req, tag):
        if (self._get_monitor_counter("count-objects-unhealthy-post-repair")
            or self._get_monitor_counter("count-corrupt-shares-post-repair")):
            return tag
        return ""

    def data_post_repair_problems(self, ctx, data):
    @renderer
    def post_repair_problems(self, req, tag):
        all_objects = self.monitor.get_status().get_all_results()
        problems = []

        for path in sorted(all_objects.keys()):
            r = all_objects[path]
            assert ICheckAndRepairResults.providedBy(r)
            cr = r.get_post_repair_results()
            if not cr.is_healthy():
                yield path, cr
                problem = self._join_pathstring(path), ": ", self._html(cr.get_summary())
                problems.append({"problem": problem})

    def render_servers_with_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares-pre-repair"]:
            return ctx.tag
        return ""
    def data_servers_with_corrupt_shares(self, ctx, data):
        return [] # TODO
    def render_server_problem(self, ctx, data):
        pass
        return SlotsSequenceElement(tag, problems)


    def render_remaining_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares-post-repair"]:
            return ctx.tag
        return ""
    def data_post_repair_corrupt_shares(self, ctx, data):
        return [] # TODO

    def render_share_problem(self, ctx, data):
        pass


    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to file/directory."]]
    @renderer
    def remaining_corrupt_shares_p(self, req, tag):
        if self._get_monitor_counter("count-corrupt-shares-post-repair"):
            return tag
        return ""

    def data_all_objects(self, ctx, data):
        r = self.monitor.get_status().get_all_results()
        for path in sorted(r.keys()):
            yield (path, r[path])
    @renderer
    def post_repair_corrupt_shares(self, req, tag):
        # TODO: this was not implemented before porting to
        # twisted.web.template; leaving it as such.
        #
        # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3371
        corrupt = [{"share":"unimplemented"}]
        return SlotsSequenceElement(tag, corrupt)

    def render_object(self, ctx, data):
        path, r = data
        ctx.fillSlots("path", self._join_pathstring(path))
        ctx.fillSlots("healthy_pre_repair",
                      str(r.get_pre_repair_results().is_healthy()))
        ctx.fillSlots("recoverable_pre_repair",
                      str(r.get_pre_repair_results().is_recoverable()))
        ctx.fillSlots("healthy_post_repair",
                      str(r.get_post_repair_results().is_healthy()))
        storage_index = r.get_storage_index()
        ctx.fillSlots("storage_index",
                      self._render_si_link(ctx, storage_index))
        ctx.fillSlots("summary",
                      self._html(r.get_pre_repair_results().get_summary()))
        return ctx.tag
    @renderer
    def all_objects(self, req, tag):
        results = {}
        if self.monitor.get_status():
            results = self.monitor.get_status().get_all_results()
        objects = []

        for path in sorted(results.keys()):
            result = results[path]
            storage_index = result.get_storage_index()
            obj = {
                "path": self._join_pathstring(path),
                "healthy_pre_repair": str(result.get_pre_repair_results().is_healthy()),
                "recoverable_pre_repair": str(result.get_pre_repair_results().is_recoverable()),
                "healthy_post_repair": str(result.get_post_repair_results().is_healthy()),
                "storage_index": self._render_si_link(req, storage_index),
                "summary": self._html(result.get_pre_repair_results().get_summary()),
            }
            objects.append(obj)

        return SlotsSequenceElement(tag, objects)

    def render_runtime(self, ctx, data):
        req = inevow.IRequest(ctx)
        runtime = time.time() - req.processing_started_timestamp
        return ctx.tag["runtime: %s seconds" % runtime]

@ -1,95 +1,106 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
  <head>
    <title>Tahoe-LAFS - Deep Check Results</title>
    <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
    <link href="/icon.png" rel="shortcut icon" />
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
    <meta n:render="refresh" />
    <meta t:render="refresh" />
  </head>
  <body>

<h1>Deep-Check-And-Repair Results for root
SI=<span n:render="root_storage_index" /></h1>
SI=<span t:render="root_storage_index" /></h1>

<h2 n:render="reload" />
<h2 t:render="reload" />

<p>Counters:</p>
<ul>
  <li>Objects Checked: <span n:render="data" n:data="objects_checked" /></li>
  <li>Objects Checked: <span><t:transparent t:render="objects_checked" /></span></li>

  <li>Objects Healthy (before repair): <span n:render="data" n:data="objects_healthy" /></li>
  <li>Objects Unhealthy (before repair): <span n:render="data" n:data="objects_unhealthy" /></li>
  <li>Corrupt Shares (before repair): <span n:render="data" n:data="corrupt_shares" /></li>
  <li>Objects Healthy (before repair): <span><t:transparent t:render="objects_healthy" /></span></li>
  <li>Objects Unhealthy (before repair): <span><t:transparent t:render="objects_unhealthy" /></span></li>
  <li>Corrupt Shares (before repair): <span><t:transparent t:render="corrupt_shares" /></span></li>

  <li>Repairs Attempted: <span n:render="data" n:data="repairs_attempted" /></li>
  <li>Repairs Successful: <span n:render="data" n:data="repairs_successful" /></li>
  <li>Repairs Unsuccessful: <span n:render="data" n:data="repairs_unsuccessful" /></li>
  <li>Repairs Attempted: <span><t:transparent t:render="repairs_attempted" /></span></li>
  <li>Repairs Successful: <span><t:transparent t:render="repairs_successful" /></span></li>
  <li>Repairs Unsuccessful: <span><t:transparent t:render="repairs_unsuccessful" /></span></li>

  <li>Objects Healthy (after repair): <span n:render="data" n:data="objects_healthy_post" /></li>
  <li>Objects Unhealthy (after repair): <span n:render="data" n:data="objects_unhealthy_post" /></li>
  <li>Corrupt Shares (after repair): <span n:render="data" n:data="corrupt_shares_post" /></li>
  <li>Objects Healthy (after repair): <span><t:transparent t:render="objects_healthy_post" /></span></li>
  <li>Objects Unhealthy (after repair): <span><t:transparent t:render="objects_unhealthy_post" /></span></li>
  <li>Corrupt Shares (after repair): <span><t:transparent t:render="corrupt_shares_post" /></span></li>

</ul>

<div n:render="pre_repair_problems_p">
<div t:render="pre_repair_problems_p">
  <h2>Files/Directories That Had Problems:</h2>

  <ul n:render="sequence" n:data="pre_repair_problems">
    <li n:pattern="item" n:render="problem"/>
    <li n:pattern="empty">None</li>
  <ul t:render="pre_repair_problems">
    <li t:render="item">
      <t:slot name="problem" />
    </li>
    <li t:render="empty">None</li>
  </ul>
</div>


<div n:render="post_repair_problems_p">
<div t:render="post_repair_problems_p">
  <h2>Files/Directories That Still Have Problems:</h2>
  <ul n:render="sequence" n:data="post_repair_problems">
    <li n:pattern="item" n:render="problem"/>
    <li n:pattern="empty">None</li>
  <ul t:render="post_repair_problems">
    <li t:render="item">
      <t:slot name="problem" />
    </li>
    <li t:render="empty">None</li>
  </ul>
</div>

<div n:render="servers_with_corrupt_shares_p">
<div t:render="servers_with_corrupt_shares_p">
  <h2>Servers on which corrupt shares were found</h2>
  <ul n:render="sequence" n:data="servers_with_corrupt_shares">
    <li n:pattern="item" n:render="server_problem"/>
    <li n:pattern="empty">None</li>
  <ul t:render="servers_with_corrupt_shares">
    <li t:render="item">
      <t:slot name="problem" />
    </li>
    <li t:render="empty">None</li>
  </ul>
</div>

<div n:render="remaining_corrupt_shares_p">
<div t:render="remaining_corrupt_shares_p">
  <h2>Remaining Corrupt Shares</h2>
  <p>These shares need to be manually inspected and removed.</p>
  <ul n:render="sequence" n:data="post_repair_corrupt_shares">
    <li n:pattern="item" n:render="share_problem"/>
    <li n:pattern="empty">None</li>
  <ul t:render="post_repair_corrupt_shares">
    <li t:render="item">
      <t:slot name="share" />
    </li>
    <li t:render="empty">None</li>
  </ul>
</div>

<div n:render="return" />
<div t:render="return_to" />

<div>
  <table n:render="sequence" n:data="all_objects">
    <tr n:pattern="header">
      <td>Relative Path</td>
      <td>Healthy Pre-Repair</td>
      <td>Recoverable Pre-Repair</td>
      <td>Healthy Post-Repair</td>
      <td>Storage Index</td>
      <td>Summary</td>
  <table t:render="all_objects">
    <tr t:render="header">
      <th>Relative Path</th>
      <th>Healthy Pre-Repair</th>
      <th>Recoverable Pre-Repair</th>
      <th>Healthy Post-Repair</th>
      <th>Storage Index</th>
      <th>Summary</th>
    </tr>
    <tr n:pattern="item" n:render="object">
      <td><n:slot name="path"/></td>
      <td><n:slot name="healthy_pre_repair"/></td>
      <td><n:slot name="recoverable_pre_repair"/></td>
      <td><n:slot name="healthy_post_repair"/></td>
      <td><n:slot name="storage_index"/></td>
      <td><n:slot name="summary"/></td>
    <tr t:render="item">
      <td><t:slot name="path"/></td>
      <td><t:slot name="healthy_pre_repair"/></td>
      <td><t:slot name="recoverable_pre_repair"/></td>
      <td><t:slot name="healthy_post_repair"/></td>
      <td><t:slot name="storage_index"/></td>
      <td><t:slot name="summary"/></td>
    </tr>
    <tr t:render="empty">
      <td>Nothing to report yet.</td>
    </tr>
  </table>
</div>

<div n:render="runtime" />
<div t:render="runtime" />

  </body>
</html>
@ -1,87 +1,93 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
  <head>
    <title>Tahoe-LAFS - Deep Check Results</title>
    <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
    <link href="/icon.png" rel="shortcut icon" />
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
    <meta n:render="refresh" />
    <meta t:render="refresh" />
  </head>
  <body>

<h1>Deep-Check Results for root SI=<span n:render="root_storage_index" /></h1>
<h1>Deep-Check Results for root SI=<span t:render="root_storage_index" /></h1>

<h2 n:render="reload" />
<h2 t:render="reload" />

<p>Counters:</p>
<ul>
  <li>Objects Checked: <span n:render="data" n:data="objects_checked" /></li>
  <li>Objects Healthy: <span n:render="data" n:data="objects_healthy" /></li>
  <li>Objects Unhealthy: <span n:render="data" n:data="objects_unhealthy" /></li>
  <li>Objects Unrecoverable: <span n:render="data" n:data="objects_unrecoverable" /></li>
  <li>Corrupt Shares: <span n:render="data" n:data="count_corrupt_shares" /></li>

  <li>Objects Checked: <span><t:transparent t:render="objects_checked" /></span></li>
  <li>Objects Healthy: <span><t:transparent t:render="objects_healthy" /></span></li>
  <li>Objects Unhealthy: <span><t:transparent t:render="objects_unhealthy" /></span></li>
  <li>Objects Unrecoverable: <span><t:transparent t:render="objects_unrecoverable" /></span></li>
  <li>Corrupt Shares: <span><t:transparent t:render="count_corrupt_shares" /></span></li>
</ul>

<div n:render="problems_p">
<div t:render="problems_p">
  <h2>Files/Directories That Had Problems:</h2>

  <ul n:render="sequence" n:data="problems">
    <li n:pattern="item" n:render="problem"/>
    <li n:pattern="empty">None</li>
  <ul t:render="problems">
    <li t:render="item">
      <t:slot name="problem" />
    </li>
    <li t:render="empty">None</li>
  </ul>
</div>


<div n:render="servers_with_corrupt_shares_p">
<div t:render="servers_with_corrupt_shares_p">
  <h2>Servers on which corrupt shares were found</h2>
  <ul n:render="sequence" n:data="servers_with_corrupt_shares">
    <li n:pattern="item" n:render="server_problem"/>
    <li n:pattern="empty">None</li>
  <ul t:render="servers_with_corrupt_shares">
    <li t:render="item">
      <t:slot name="problem" />
    </li>
    <li t:render="empty">None</li>
  </ul>
</div>

<div n:render="corrupt_shares_p">
<div t:render="corrupt_shares_p">
  <h2>Corrupt Shares</h2>
  <p>If repair fails, these shares need to be manually inspected and removed.</p>
  <table n:render="sequence" n:data="corrupt_shares">
    <tr n:pattern="header">
      <td>Server</td>
      <td>Server Nickname</td>
      <td>Storage Index</td>
      <td>Share Number</td>
  <table t:render="corrupt_shares">
    <tr t:render="header">
      <th>Server</th>
      <th>Server Nickname</th>
      <th>Storage Index</th>
      <th>Share Number</th>
    </tr>
    <tr n:pattern="item" n:render="share_problem">
      <td><n:slot name="serverid"/></td>
      <td><n:slot name="nickname"/></td>
      <td><n:slot name="si"/></td>
      <td><n:slot name="shnum"/></td>
    <tr t:render="item">
      <td><t:slot name="serverid"/></td>
      <td><t:slot name="nickname"/></td>
      <td><t:slot name="si"/></td>
      <td><t:slot name="shnum"/></td>
    </tr>
  </table>
</div>

<div n:render="return" />
<div t:render="return_to" />

<div>
  <h2>All Results</h2>
  <table n:render="sequence" n:data="all_objects">
    <tr n:pattern="header">
      <td>Relative Path</td>
      <td>Healthy</td>
      <td>Recoverable</td>
      <td>Storage Index</td>
      <td>Summary</td>
  <table t:render="all_objects">
    <tr t:render="header">
      <th>Relative Path</th>
      <th>Healthy</th>
      <th>Recoverable</th>
      <th>Storage Index</th>
      <th>Summary</th>
    </tr>
    <tr n:pattern="item" n:render="object">
      <td><n:slot name="path"/></td>
      <td><n:slot name="healthy"/></td>
      <td><n:slot name="recoverable"/></td>
      <td><tt><n:slot name="storage_index"/></tt></td>
      <td><n:slot name="summary"/></td>
    <tr t:render="item">
      <td><t:slot name="path"/></td>
      <td><t:slot name="healthy"/></td>
      <td><t:slot name="recoverable"/></td>
      <td><tt><t:slot name="storage_index"/></tt></td>
      <td><t:slot name="summary"/></td>
    </tr>
    <tr t:render="empty">
      <td>Nothing to report yet.</td>
    </tr>
  </table>
</div>

<div n:render="runtime" />
<div t:render="runtime" />

  </body>
</html>

@ -261,7 +261,7 @@ class MoreInfoElement(Element):
    @renderer
    def deep_check_form(self, req, tag):
        ophandle = base32.b2a(os.urandom(16))
        deep_check = T.form(action=".", method="post",
        deep_check = T.form(action=req.path, method="post",
                            enctype="multipart/form-data")(
            T.fieldset(
                T.input(type="hidden", name="t", value="start-deep-check"),
@ -287,7 +287,7 @@ class MoreInfoElement(Element):
    @renderer
    def deep_size_form(self, req, tag):
        ophandle = base32.b2a(os.urandom(16))
        deep_size = T.form(action=".", method="post",
        deep_size = T.form(action=req.path, method="post",
                           enctype="multipart/form-data")(
            T.fieldset(
                T.input(type="hidden", name="t", value="start-deep-size"),
@ -300,7 +300,7 @@ class MoreInfoElement(Element):
    @renderer
    def deep_stats_form(self, req, tag):
        ophandle = base32.b2a(os.urandom(16))
        deep_stats = T.form(action=".", method="post",
        deep_stats = T.form(action=req.path, method="post",
                            enctype="multipart/form-data")(
            T.fieldset(
                T.input(type="hidden", name="t", value="start-deep-stats"),
@ -313,7 +313,7 @@ class MoreInfoElement(Element):
    @renderer
    def manifest_form(self, req, tag):
        ophandle = base32.b2a(os.urandom(16))
        manifest = T.form(action=".", method="post",
        manifest = T.form(action=req.path, method="post",
                          enctype="multipart/form-data")(
            T.fieldset(
                T.input(type="hidden", name="t", value="start-manifest"),

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
  <head>
    <title>Tahoe-LAFS - Check Results</title>
    <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -11,7 +11,7 @@

<div>Literal files are always healthy: their data is contained in the URI</div>

<div n:render="return" />
<div t:render="return_to" />

  </body>
</html>

@ -49,9 +49,8 @@ commands =
    tahoe --version

[testenv:py36]
# On macOS, git inside of ratchet.sh needs $HOME.
passenv = {[testenv]passenv} HOME
commands = {toxinidir}/misc/python3/ratchet.sh
commands =
    trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata.test.python3_tests}

[testenv:integration]
setenv =