"""
this is a load-generating client program. It does all of its work through a
given tahoe node (specified by URL), and performs random reads and writes
to the target.

Run this in a directory with the following files:
server-URLs : a list of tahoe node URLs (one per line). Each operation
will use a randomly-selected server.
root.cap: (string) the top-level directory rwcap to use
delay: (float) seconds to delay between operations
operation-mix: "R/W": two ints, relative frequency of read and write ops
#size:?

Set argv[1] to a per-client stats-NN.out file. This will be updated with
running totals of bytes-per-second and operations-per-second. The stats from
multiple clients can be totalled together and averaged over time to compute
the traffic being accepted by the grid.

Each time a 'read' operation is performed, the client will begin at the root
and randomly choose a child. If the child is a directory, the client will
recurse. If the child is a file, the client will read the contents of the
file.

Each time a 'write' operation is performed, the client will generate a target
filename (a random string). 90% of the time, the file will be written into
the same directory that was used last time (starting at the root). 10% of the
time, a new directory is created by assembling 1 to 5 pathnames chosen at
random. The client then writes a certain number of zero bytes to this file.
The filesize is determined with something like a power-law distribution, with
a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
"""
|
2023-03-24 19:27:51 +00:00
|
|
|
from __future__ import annotations
|
2007-12-18 04:06:07 +00:00
|
|
|
|
|
|
|
import os, sys, httplib, binascii
|
2017-01-19 22:39:53 +00:00
|
|
|
import urllib, json, random, time, urlparse
|
2007-12-18 04:06:07 +00:00
|
|
|
|
2007-12-18 21:07:37 +00:00
|
|
|
# Stats-aggregation mode: invoked as "--stats stats-1.out stats-2.out ...".
# Tails the per-client stats files that the load-generating clients keep
# updated, printing per-second deltas plus a moving-window average.
# This loop never returns; the process must be killed to stop it.
if sys.argv[1] == "--stats":
    statsfiles = sys.argv[2:]
    # gather stats every 10 seconds, do a moving-window average of the last
    # 60 seconds
    DELAY = 10
    MAXSAMPLES = 6
    totals = []
    last_stats : dict[str, float] = {}
    while True:
        # Accumulate the current counter values, summed across all clients.
        stats : dict[str, float] = {}
        for sf in statsfiles:
            for line in open(sf, "r").readlines():
                # each line looks like "files-uploaded: 123"
                name, str_value = line.split(":")
                value = int(str_value.strip())
                if name not in stats:
                    stats[name] = 0
                stats[name] += float(value)
            # guard against accidentally reusing the loop variable below
            del name
        if last_stats:
            # Per-interval change for each counter since the last sample.
            delta = dict( [ (n,stats[n]-last_stats[n])
                            for n in stats ] )
            print("THIS SAMPLE:")
            for name in sorted(delta.keys()):
                avg = float(delta[name]) / float(DELAY)
                print("%20s: %0.2f per second" % (name, avg))
            totals.append(delta)
            # keep only the most recent MAXSAMPLES deltas for the window
            while len(totals) > MAXSAMPLES:
                totals.pop(0)

            # now compute average
            print()
            print("MOVING WINDOW AVERAGE:")
            for name in sorted(delta.keys()):
                avg = sum([ s[name] for s in totals]) / (DELAY*len(totals))
                print("%20s %0.2f per second" % (name, avg))

        last_stats = stats
        print()
        print()
        time.sleep(DELAY)
2007-12-18 04:06:07 +00:00
|
|
|
# Per-client mode: argv[1] names the stats file this client keeps updated
# with its running totals (rewritten after every operation).
stats_out = sys.argv[1]

# Load the configuration files described in the module docstring.  Context
# managers close each descriptor promptly instead of leaking it until GC
# (the original opened all four files without ever closing them).
with open("server-URLs", "r") as f:
    # one node URL per line; blank lines are ignored
    server_urls = [url.strip() for url in f if url.strip()]
with open("root.cap", "r") as f:
    # the top-level directory rwcap that all operations start from
    root = f.read().strip()
with open("delay", "r") as f:
    # seconds to sleep between operations
    delay = float(f.read().strip())
with open("operation-mix", "r") as f:
    # "R/W": relative frequencies of read and write operations
    readfreq, writefreq = [int(x) for x in f.read().strip().split("/")]

# Running totals, written to the stats file by the main loop below.
files_uploaded = 0
files_downloaded = 0
bytes_uploaded = 0
bytes_downloaded = 0
directories_read = 0
directories_written = 0
|
2007-12-18 04:06:07 +00:00
|
|
|
|
2010-01-14 20:11:19 +00:00
|
|
|
def listdir(nodeurl, root, remote_pathname):
    """Fetch the JSON listing of a remote directory.

    Returns a dict mapping child name (as a plain str) to the child's
    parsed JSON node.  Increments the directories_read counter.
    """
    base = nodeurl if nodeurl[-1] == "/" else nodeurl + "/"
    url = base + "uri/%s/" % urllib.quote(root)
    if remote_pathname:
        url += urllib.quote(remote_pathname)
    url += "?t=json"
    data = urllib.urlopen(url).read()
    try:
        parsed = json.loads(data)
    except ValueError:
        # dump the raw exchange before re-raising, to aid debugging
        print("URL was", url)
        print("DATA was", data)
        raise
    nodetype, dirinfo = parsed
    assert nodetype == "dirnode"
    global directories_read
    directories_read += 1
    return dict([(str(childname), childnode)
                 for (childname, childnode)
                 in dirinfo["children"].iteritems()])
|
2007-12-18 04:06:07 +00:00
|
|
|
|
|
|
|
|
|
|
|
def choose_random_descendant(server_url, root, pathname=""):
    """Walk downward from *pathname*, picking one random child per level,
    and return the path of the first filenode reached."""
    children = listdir(server_url, root, pathname)
    picked = random.choice(children.keys())
    new_pathname = pathname + "/" + picked if pathname else picked
    if children[picked][0] == "filenode":
        return new_pathname
    # the pick was a directory: keep descending
    return choose_random_descendant(server_url, root, new_pathname)
|
|
|
|
|
|
|
|
def read_and_discard(nodeurl, root, pathname):
    """Download the file at *pathname*, counting its length into the
    bytes_downloaded total but otherwise discarding the data."""
    if nodeurl[-1] != "/":
        nodeurl = nodeurl + "/"
    url = nodeurl + "uri/%s/" % urllib.quote(root)
    if pathname:
        url += urllib.quote(pathname)
    global bytes_downloaded
    stream = urllib.urlopen(url)
    # stream in 4kB chunks so huge files don't get buffered in memory
    chunk = stream.read(4096)
    while chunk:
        bytes_downloaded += len(chunk)
        chunk = stream.read(4096)
|
|
|
|
|
2007-12-18 04:06:07 +00:00
|
|
|
|
|
|
|
# Pool of three-level pathnames from which write operations derive their
# target directories: create_random_directory() picks one entry at random
# and keeps a random-length prefix of it.
directories = [
    "dreamland/disengaging/hucksters",
    "dreamland/disengaging/klondikes",
    "dreamland/disengaging/neatly",
    "dreamland/cottages/richmond",
    "dreamland/cottages/perhaps",
    "dreamland/cottages/spies",
    "dreamland/finder/diversion",
    "dreamland/finder/cigarette",
    "dreamland/finder/album",
    "hazing/licences/comedian",
    "hazing/licences/goat",
    "hazing/licences/shopkeeper",
    "hazing/regiment/frigate",
    "hazing/regiment/quackery",
    "hazing/regiment/centerpiece",
    "hazing/disassociate/mob",
    "hazing/disassociate/nihilistic",
    "hazing/disassociate/bilbo",
    ]
|
|
|
|
|
|
|
|
def create_random_directory():
    """Return a random prefix (1..N segments) of a randomly chosen entry
    from the module-level ``directories`` pool."""
    segments = random.choice(directories).split("/")
    keep = random.randint(1, len(segments))
    return "/".join(segments[:keep])
|
|
|
|
|
|
|
|
def generate_filename():
    """Return a random filename: 8 hex characters from 4 random bytes."""
    return binascii.hexlify(os.urandom(4))
|
|
|
|
|
|
|
|
def choose_size():
    """Draw a filesize (int) from an exponential distribution with a
    mean of 10kB, capped at 100MB."""
    MEAN_SIZE = 10e3
    MAX_SIZE = 100e6
    return int(min(random.expovariate(1.0 / MEAN_SIZE), MAX_SIZE))
|
|
|
|
|
|
|
|
# copied from twisted/web/client.py
def parse_url(url, defaultPort=None):
    """Split *url* into a (scheme, host, port, path) tuple.

    The port defaults to 443 for https and 80 otherwise, unless the host
    carries an explicit ":port" suffix.  An empty path becomes "/".
    """
    url = url.strip()
    parsed = urlparse.urlparse(url)
    scheme = parsed[0]
    # everything after the netloc, including query/fragment
    path = urlparse.urlunparse(('','')+parsed[2:])
    if defaultPort is None:
        defaultPort = 443 if scheme == 'https' else 80
    host = parsed[1]
    port = defaultPort
    if ':' in host:
        host, port = host.split(':')
        port = int(port)
    if not path:
        path = "/"
    return scheme, host, port, path
|
|
|
|
|
2010-01-14 20:11:19 +00:00
|
|
|
def generate_and_put(nodeurl, root, remote_filename, size):
    """PUT *size* zero bytes to *remote_filename* under the root cap,
    streaming the body in 4kB chunks, and return the HTTP response."""
    if nodeurl[-1] != "/":
        nodeurl += "/"
    url = nodeurl + "uri/%s/" % urllib.quote(root)
    url += urllib.quote(remote_filename)

    scheme, host, port, path = parse_url(url)
    if scheme == "http":
        conn = httplib.HTTPConnection(host, port)
    elif scheme == "https":
        conn = httplib.HTTPSConnection(host, port)
    else:
        raise ValueError("unknown scheme '%s', need http or https" % scheme)
    conn.putrequest("PUT", path)
    # NOTE(review): header name "Hostname" (not the standard "Host") is
    # preserved from the original -- confirm before "fixing".
    conn.putheader("Hostname", host)
    conn.putheader("User-Agent", "tahoe-check-load")
    conn.putheader("Connection", "close")
    conn.putheader("Content-Length", "%d" % size)
    conn.endheaders()
    global bytes_uploaded
    remaining = size
    while remaining:
        chunk = min(remaining, 4096)
        conn.send("\x00" * chunk)
        bytes_uploaded += chunk
        remaining -= chunk
    return conn.getresponse()
|
|
|
|
|
|
|
|
|
|
|
|
# Directory reused by consecutive write operations; "" means the root
# directory itself.  Roughly 10% of writes pick a fresh directory.
current_writedir = ""

# Main load-generation loop: forever, sleep for the configured delay, pick
# read or write per the configured mix, perform one operation against a
# randomly chosen server, then atomically rewrite the stats file.
while True:
    time.sleep(delay)
    if random.uniform(0, readfreq+writefreq) < readfreq:
        op = "read"
    else:
        op = "write"
    print("OP:", op)
    server = random.choice(server_urls)
    if op == "read":
        # descend from the root to a random file and download it
        pathname = choose_random_descendant(server, root)
        print(" reading", pathname)
        read_and_discard(server, root, pathname)
        files_downloaded += 1
    elif op == "write":
        # 10% of writes switch to a freshly chosen target directory
        if random.uniform(0, 100) < 10:
            current_writedir = create_random_directory()
        filename = generate_filename()
        if current_writedir:
            pathname = current_writedir + "/" + filename
        else:
            pathname = filename
        print(" writing", pathname)
        size = choose_size()
        print(" size", size)
        generate_and_put(server, root, pathname, size)
        files_uploaded += 1
    # NOTE(review): directories_written is reported below but never
    # incremented anywhere in this file -- it always reads 0.

    # write to a temp file then rename, so the stats aggregator never
    # observes a partially written stats file
    f = open(stats_out+".tmp", "w")
    f.write("files-uploaded: %d\n" % files_uploaded)
    f.write("files-downloaded: %d\n" % files_downloaded)
    f.write("bytes-uploaded: %d\n" % bytes_uploaded)
    f.write("bytes-downloaded: %d\n" % bytes_downloaded)
    f.write("directories-read: %d\n" % directories_read)
    f.write("directories-written: %d\n" % directories_written)
    f.close()
    os.rename(stats_out+".tmp", stats_out)
|