hush pyflakes-0.4.0 warnings: remove trivial unused variables. For #900.

Brian Warner
2010-01-14 14:15:29 -08:00
parent 874a979a8e
commit 731d15e56f
43 changed files with 47 additions and 112 deletions
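
The hunks below are mechanical and fall into three patterns: an assignment whose result is never read is deleted outright; when the right-hand side has a side effect (a remote call, a log message), the call is kept and only the binding is dropped; and an import performed purely to probe availability is bound to _hush_pyflakes and then immediately deleted. A minimal sketch of the three patterns, using hypothetical names rather than code from the tree:

import logging
log = logging.getLogger(__name__)

def send_query():
    # stand-in for a call that matters for its side effect, not its result
    log.info("query sent")
    return "response"

def before():
    unused_flag = False        # pyflakes 0.4.0: local variable assigned but never used
    response = send_query()    # the binding is unused, but the call must still happen
    return 1

def after():
    # pattern 1: the purely dead assignment is deleted
    # pattern 2: the side-effecting call is kept, only the binding is dropped
    send_query()
    return 1

# pattern 3: an import done only to test availability is bound and then
# deleted, so pyflakes no longer reports the name as unused
try:
    import numpy
    _hush_pyflakes = numpy
    del _hush_pyflakes
except ImportError:
    numpy = None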


@@ -413,7 +413,6 @@ class DirectoryNode:
     def set_children(self, entries, overwrite=True):
         # this takes URIs
         a = Adder(self, overwrite=overwrite)
-        node_entries = []
         for (name, e) in entries.iteritems():
             assert isinstance(name, unicode)
             if len(e) == 2:


@@ -82,9 +82,9 @@ class ValidatedThingObtainer:
         self._log_id = log_id
     def _bad(self, f, validatedthingproxy):
-        failtype = f.trap(RemoteException, DeadReferenceError,
-                          IntegrityCheckReject, layout.LayoutInvalid,
-                          layout.ShareVersionIncompatible)
+        f.trap(RemoteException, DeadReferenceError,
+               IntegrityCheckReject, layout.LayoutInvalid,
+               layout.ShareVersionIncompatible)
         level = log.WEIRD
         if f.check(DeadReferenceError):
             level = log.UNUSUAL
@@ -598,9 +598,9 @@ class BlockDownloader(log.PrefixingLogMixin):
         self.parent.hold_block(self.blocknum, data)
     def _got_block_error(self, f):
-        failtype = f.trap(RemoteException, DeadReferenceError,
-                          IntegrityCheckReject,
-                          layout.LayoutInvalid, layout.ShareVersionIncompatible)
+        f.trap(RemoteException, DeadReferenceError,
+               IntegrityCheckReject, layout.LayoutInvalid,
+               layout.ShareVersionIncompatible)
         if f.check(RemoteException, DeadReferenceError):
             level = log.UNUSUAL
         else:


@@ -777,7 +777,6 @@ class CHKUploader:
         for shnum in self._encoder.get_shares_placed():
             peer_tracker = self._peer_trackers[shnum]
             peerid = peer_tracker.peerid
-            peerid_s = idlib.shortnodeid_b2a(peerid)
             r.sharemap.add(shnum, peerid)
             r.servermap.add(peerid, shnum)
         r.pushed_shares = len(self._encoder.get_shares_placed())


@@ -121,7 +121,6 @@ class ResponseCache:
         # we have a fragment that contains the whole request
         index = (verinfo, shnum)
-        end = offset+length
         for entry in self.cache.get(index, set()):
             (e_start, e_data, e_timestamp) = entry
             if self._inside(offset, length, e_start, len(e_data)):


@@ -166,7 +166,7 @@ def pack_offsets(verification_key_length, signature_length,
     o3 = offsets['block_hash_tree'] = o2 + share_hash_chain_length
     o4 = offsets['share_data'] = o3 + block_hash_tree_length
     o5 = offsets['enc_privkey'] = o4 + share_data_length
-    o6 = offsets['EOF'] = o5 + encprivkey_length
+    offsets['EOF'] = o5 + encprivkey_length
     return struct.pack(">LLLLQQ",
                        offsets['signature'],


@@ -374,7 +374,6 @@ class Publish:
                                str(self._first_write_error),
                                self._first_write_error)
-        new_assignments = []
         # we then index this peerlist with an integer, because we may have to
         # wrap. We update the goal as we go.
         i = 0
@@ -465,7 +464,7 @@ class Publish:
             # build the block hash tree. SDMF has only one leaf.
             leaves = [hashutil.block_hash(share_data)]
             t = hashtree.HashTree(leaves)
-            block_hash_trees[shnum] = block_hash_tree = list(t)
+            block_hash_trees[shnum] = list(t)
             share_hash_leaves[shnum] = t[0]
         for leaf in share_hash_leaves:
             assert leaf is not None


@@ -500,7 +500,6 @@ class ServermapUpdater:
         self._status.set_status("Sending %d initial queries" % len(peerlist))
         self._queries_outstanding = set()
         self._sharemap = DictOfSets() # shnum -> [(peerid, seqnum, R)..]
-        dl = []
         for (peerid, ss) in peerlist.items():
             self._queries_outstanding.add(peerid)
             self._do_query(ss, peerid, self._storage_index, self._read_size)


@@ -57,7 +57,6 @@ class ProvisioningTool(rend.Page):
         i_select = T.select(name=name)
         for (count, description) in options:
             count = astype(count)
-            selected = False
             if ((current_value is not None and count == current_value) or
                 (current_value is None and count == default)):
                 o = T.option(value=str(count), selected="true")[description]
@@ -340,7 +339,6 @@ class ProvisioningTool(rend.Page):
         add_output("Users",
                    T.div["Average file size: ", number(file_size)])
         total_files = num_users * files_per_user / sharing_ratio
-        user_file_check_interval = file_check_interval / files_per_user
         add_output("Grid",
                    T.div["Total number of files in grid: ",
@@ -711,6 +709,7 @@ class ProvisioningTool(rend.Page):
             from allmydata import reliability
             # we import this just to test to see if the page is available
             _hush_pyflakes = reliability
+            del _hush_pyflakes
             f = [T.div[T.a(href="../reliability")["Reliability Math"]], f]
         except ImportError:
             pass


@@ -79,7 +79,6 @@ class ReliabilityModel:
         #print "DIFF:", (old_post_repair - decay * repair)
         START = array([0]*N + [1])
-        ALIVE = array([0]*k + [1]*(1+N-k))
         DEAD = array([1]*k + [0]*(1+N-k))
         REPAIRp = array([0]*k + [1]*(R-k) + [0]*(1+N-R))
         REPAIR_newshares = array([0]*k +
@@ -87,7 +86,6 @@ class ReliabilityModel:
                                  [0]*(1+N-R))
         assert REPAIR_newshares.shape[0] == N+1
         #print "START", START
-        #print "ALIVE", ALIVE
         #print "REPAIRp", REPAIRp
         #print "REPAIR_newshares", REPAIR_newshares


@@ -553,10 +553,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
         # mutable share
         m = MutableShareFile(abs_sharefile)
         WE, nodeid = m._read_write_enabler_and_nodeid(f)
-        num_extra_leases = m._read_num_extra_leases(f)
         data_length = m._read_data_length(f)
-        extra_lease_offset = m._read_extra_lease_offset(f)
-        container_size = extra_lease_offset - m.DATA_OFFSET
         expiration_time = min( [lease.expiration_time
                                 for (i,lease) in m._enumerate_leases(f)] )
         expiration = max(0, expiration_time - now)


@@ -27,7 +27,6 @@ g.setServiceParent(application)
 """
 def create_stats_gatherer(config):
-    out = config.stdout
     err = config.stderr
     basedir = config['basedir']
     if not basedir:


@@ -72,14 +72,11 @@ class BackerUpper:
     def run(self):
         options = self.options
         nodeurl = options['node-url']
-        from_dir = options.from_dir
-        to_dir = options.to_dir
         self.verbosity = 1
         if options['quiet']:
             self.verbosity = 0
         if options['verbose']:
             self.verbosity = 2
-        stdin = options.stdin
         stdout = options.stdout
         stderr = options.stderr
@@ -101,7 +98,6 @@ class BackerUpper:
             to_url += "/"
         archives_url = to_url + "Archives/"
-        latest_url = to_url + "Latest"
         # first step: make sure the target directory exists, as well as the
         # Archives/ subdirectory.
@@ -112,11 +108,6 @@ class BackerUpper:
                 print >>stderr, "Unable to create target directory: %s %s %s" % \
                       (resp.status, resp.reason, resp.read())
                 return 1
-            archives_dir = {}
-        else:
-            jdata = simplejson.load(resp)
-            (otype, attrs) = jdata
-            archives_dir = attrs["children"]
         # second step: process the tree
         new_backup_dircap = self.process(options.from_dir)


@@ -530,7 +530,6 @@ class Copier:
         url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
         if path:
             url += "/" + escape_path(path)
-        last_slash = path.rfind("/")
         resp = do_http("GET", url + "?t=json")
         if resp.status == 404:


@@ -93,7 +93,6 @@ class StatsGrabber(SlowOperationRunner):
     def write_results(self, data):
         stdout = self.options.stdout
-        stderr = self.options.stderr
         keys = ("count-immutable-files",
                 "count-mutable-files",
                 "count-literal-files",


@@ -5,17 +5,11 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path
 def rm(options):
     """
-    @param verbosity: 0, 1, or 2, meaning quiet, verbose, or very verbose
     @return: a Deferred which eventually fires with the exit code
     """
     nodeurl = options['node-url']
     aliases = options.aliases
     where = options.where
-    if options['quiet']:
-        verbosity = 0
-    else:
-        verbosity = 2
     stdout = options.stdout
     stderr = options.stderr


@@ -258,7 +258,6 @@ class ShareCrawler(service.MultiService):
         self.current_sleep_time = None
         self.next_wake_time = None
         try:
-            s = self.last_complete_prefix_index
             self.start_current_prefix(start_slice)
             finished_cycle = True
         except TimeSliceExceeded:


@@ -76,7 +76,7 @@ class StorageServer(service.MultiService, Referenceable):
         self._clean_incomplete()
         fileutil.make_dirs(self.incomingdir)
         self._active_writers = weakref.WeakKeyDictionary()
-        lp = log.msg("StorageServer created", facility="tahoe.storage")
+        log.msg("StorageServer created", facility="tahoe.storage")
         if reserved_space:
             if self.get_available_space() is None:
@@ -479,7 +479,7 @@ class StorageServer(service.MultiService, Referenceable):
         start = time.time()
         self.count("writev")
         si_s = si_b2a(storage_index)
-        lp = log.msg("storage: slot_writev %s" % si_s)
+        log.msg("storage: slot_writev %s" % si_s)
         si_dir = storage_index_to_dir(storage_index)
         (write_enabler, renew_secret, cancel_secret) = secrets
         # shares exist if there is a file for them


@@ -95,7 +95,6 @@ class SpeedTest:
     def do_test(self):
         print "doing test"
-        rr = self.client_rref
         d = defer.succeed(None)
         d.addCallback(self.one_test, "startup", 1, 1000, False) #ignore this one
         d.addCallback(self.measure_rtt)


@@ -1335,18 +1335,11 @@ def _corrupt_crypttext_hash_tree(data, debug=False):
     return corrupt_field(data, 0x0c+crypttexthashtreeoffset, blockhashesoffset-crypttexthashtreeoffset, debug=debug)
 def _corrupt_crypttext_hash_tree_byte_x221(data, debug=False):
-    """Scramble the file data -- the field containing the crypttext hash tree
-    will have the 7th bit of the 9th byte flipped.
+    """Scramble the file data -- the byte at offset 0x221 will have its 7th
+    (b1) bit flipped.
     """
     sharevernum = struct.unpack(">L", data[0x0c:0x0c+4])[0]
     assert sharevernum in (1, 2), "This test is designed to corrupt immutable shares of v1 or v2 in specific ways."
-    if sharevernum == 1:
-        crypttexthashtreeoffset = struct.unpack(">L", data[0x0c+0x14:0x0c+0x14+4])[0]
-        blockhashesoffset = struct.unpack(">L", data[0x0c+0x18:0x0c+0x18+4])[0]
-    else:
-        crypttexthashtreeoffset = struct.unpack(">Q", data[0x0c+0x24:0x0c+0x24+8])[0]
-        blockhashesoffset = struct.unpack(">Q", data[0x0c+0x2c:0x0c+0x2c+8])[0]
     if debug:
         log.msg("original data: %r" % (data,))
     return data[:0x0c+0x221] + chr(ord(data[0x0c+0x221])^0x02) + data[0x0c+0x2210+1:]


@@ -32,7 +32,7 @@ class Basic(unittest.TestCase):
         basedir = "test_client.Basic.test_loadable"
         os.mkdir(basedir)
         open(os.path.join(basedir, "introducer.furl"), "w").write("")
-        c = client.Client(basedir)
+        client.Client(basedir)
     def test_loadable_old_config_bits(self):
         basedir = "test_client.Basic.test_loadable_old_config_bits"
@@ -190,7 +190,7 @@ class Run(unittest.TestCase, testutil.StallMixin):
         dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
         open(os.path.join(basedir, "introducer.furl"), "w").write(dummy)
         open(os.path.join(basedir, "suicide_prevention_hotline"), "w")
-        c = client.Client(basedir)
+        client.Client(basedir)
     def test_reloadable(self):
         basedir = "test_client.Run.test_reloadable"


@@ -341,7 +341,8 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
         ss = StorageServer(self.basedir, serverid)
         ss.setServiceParent(self.s)
-        sis = [self.write(i, ss, serverid) for i in range(10)]
+        for i in range(10):
+            self.write(i, ss, serverid)
         statefile = os.path.join(self.basedir, "statefile")
         c = ConsumingCrawler(ss, statefile)
@@ -385,7 +386,8 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
         ss = StorageServer(self.basedir, serverid)
         ss.setServiceParent(self.s)
-        sis = [self.write(i, ss, serverid) for i in range(10)]
+        for i in range(10):
+            self.write(i, ss, serverid)
         statefile = os.path.join(self.basedir, "statefile")
         c = ShareCrawler(ss, statefile)
@@ -412,7 +414,8 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
         ss = StorageServer(self.basedir, serverid)
         ss.setServiceParent(self.s)
-        sis = [self.write(i, ss, serverid) for i in range(30)]
+        for i in range(30):
+            self.write(i, ss, serverid)
         statefile = os.path.join(self.basedir, "statefile")
         c = OneShotCrawler(ss, statefile)


@@ -38,7 +38,6 @@ class MutableChecker(GridTestMixin, unittest.TestCase, ErrorMixin):
         def _created(node):
             self.node = node
             self.fileurl = "uri/" + urllib.quote(node.get_uri())
-            si = self.node.get_storage_index()
         d.addCallback(_created)
         # now make sure the webapi verifier sees no problems
         d.addCallback(lambda ign: self.GET(self.fileurl+"?t=check&verify=true",
@@ -395,13 +394,14 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         self.check_stats_good(stats)
     def do_web_stream_check(self, ignored):
+        # TODO
         return
         d = self.web(self.root, t="stream-deep-check")
         def _check(res):
             units = list(self.parse_streamed_json(res))
-            files = [u for u in units if u["type"] in ("file", "directory")]
+            #files = [u for u in units if u["type"] in ("file", "directory")]
             assert units[-1]["type"] == "stats"
-            stats = units[-1]["stats"]
+            #stats = units[-1]["stats"]
             # ...
         d.addCallback(_check)
         return d


@@ -294,7 +294,6 @@ class Dirnode(GridTestMixin, unittest.TestCase,
         return d
     def _mark_file_bad(self, rootnode):
-        si = rootnode.get_storage_index()
         self.delete_shares_numbered(rootnode.get_uri(), [0])
         return rootnode
@@ -976,25 +975,25 @@ class Packing(unittest.TestCase):
         self.failUnlessIn("lit", packed)
         kids = self._make_kids(nm, ["imm", "lit", "write"])
-        e = self.failUnlessRaises(dirnode.MustBeDeepImmutable,
-                                  dirnode.pack_children,
-                                  fn, kids, deep_immutable=True)
+        self.failUnlessRaises(dirnode.MustBeDeepImmutable,
+                              dirnode.pack_children,
+                              fn, kids, deep_immutable=True)
         # read-only is not enough: all children must be immutable
         kids = self._make_kids(nm, ["imm", "lit", "read"])
-        e = self.failUnlessRaises(dirnode.MustBeDeepImmutable,
-                                  dirnode.pack_children,
-                                  fn, kids, deep_immutable=True)
+        self.failUnlessRaises(dirnode.MustBeDeepImmutable,
+                              dirnode.pack_children,
+                              fn, kids, deep_immutable=True)
         kids = self._make_kids(nm, ["imm", "lit", "dirwrite"])
-        e = self.failUnlessRaises(dirnode.MustBeDeepImmutable,
-                                  dirnode.pack_children,
-                                  fn, kids, deep_immutable=True)
+        self.failUnlessRaises(dirnode.MustBeDeepImmutable,
+                              dirnode.pack_children,
+                              fn, kids, deep_immutable=True)
         kids = self._make_kids(nm, ["imm", "lit", "dirread"])
-        e = self.failUnlessRaises(dirnode.MustBeDeepImmutable,
-                                  dirnode.pack_children,
-                                  fn, kids, deep_immutable=True)
+        self.failUnlessRaises(dirnode.MustBeDeepImmutable,
+                              dirnode.pack_children,
+                              fn, kids, deep_immutable=True)
 class FakeMutableFile:
     implements(IMutableFileNode)


@@ -457,7 +457,6 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin):
         def _ready(res):
             k,happy,n = e.get_param("share_counts")
             assert n == NUM_SHARES # else we'll be completely confused
-            all_peers = []
             for shnum in range(NUM_SHARES):
                 mode = bucket_modes.get(shnum, "good")
                 peer = FakeBucketReaderWriterProxy(mode)


@@ -30,7 +30,6 @@ class Node(unittest.TestCase):
                                needed_shares=3,
                                total_shares=10,
                                size=1000)
-        c = FakeClient()
         cf = cachedir.CacheFile("none")
         fn1 = ImmutableFileNode(u, None, None, None, None, cf)
         fn2 = ImmutableFileNode(u, None, None, None, None, cf)
@@ -55,7 +54,6 @@ class Node(unittest.TestCase):
     def test_literal_filenode(self):
         DATA = "I am a short file."
         u = uri.LiteralFileURI(data=DATA)
-        c = None
         fn1 = LiteralFileNode(u)
         fn2 = LiteralFileNode(u)
         self.failUnlessEqual(fn1, fn2)
@@ -91,7 +89,6 @@ class Node(unittest.TestCase):
     def test_mutable_filenode(self):
         client = FakeClient()
         wk = "\x00"*16
-        fp = "\x00"*32
         rk = hashutil.ssk_readkey_hash(wk)
         si = hashutil.ssk_storage_index_hash(rk)


@@ -81,7 +81,7 @@ class Incomplete(unittest.TestCase):
         self.failUnlessEqual(ht.needed_hashes(5, True), set([12, 11, 6, 1]))
     def test_depth_of(self):
-        ht = hashtree.IncompleteHashTree(8)
+        hashtree.IncompleteHashTree(8)
         self.failUnlessEqual(hashtree.depth_of(0), 0)
         for i in [1,2]:
             self.failUnlessEqual(hashtree.depth_of(i), 1, "i=%d"%i)


@@ -78,7 +78,7 @@ class Test(common.ShareManglingMixin, unittest.TestCase):
             random.shuffle(shnums)
             for i in shnums[:7]:
                 self._corrupt_a_share(None, common._corrupt_offset_of_block_hashes_to_truncate_crypttext_hashes, i)
-            before_download_reads = self._count_reads()
+            #before_download_reads = self._count_reads()
         d.addCallback(_then_corrupt_7)
         d.addCallback(self._download_and_check_plaintext)
         return d


@@ -129,7 +129,7 @@ class SystemTest(SystemTestMixin, unittest.TestCase):
             log.msg("creating client %d: %s" % (i, tub.getShortTubID()))
             c = IntroducerClient(tub, self.introducer_furl, u"nickname-%d" % i,
                                  "version", "oldest")
-            received_announcements[c] = ra = {}
+            received_announcements[c] = {}
             def got(serverid, ann_d, announcements):
                 announcements[serverid] = ann_d
             c.subscribe_to("storage", got, received_announcements[c])


@@ -6,7 +6,6 @@ from twisted.internet import defer, reactor
 from allmydata import uri, client
 from allmydata.nodemaker import NodeMaker
 from allmydata.util import base32
-from allmydata.util.idlib import shortnodeid_b2a
 from allmydata.util.hashutil import tagged_hash, ssk_writekey_hash, \
      ssk_pubkey_fingerprint_hash
 from allmydata.interfaces import IRepairResults, ICheckAndRepairResults, \
@@ -71,7 +70,6 @@ class FakeStorage:
         self._pending_timer = None
         pending = self._pending
         self._pending = {}
-        extra = []
         for peerid in self._sequence:
             if peerid in pending:
                 d, shares = pending.pop(peerid)
@@ -602,7 +600,6 @@ class PublishMixin:
         # publish a file and create shares, which can then be manipulated
         # later.
         self.CONTENTS = "New contents go here" * 1000
-        num_peers = 20
         self._storage = FakeStorage()
         self._nodemaker = make_nodemaker(self._storage)
         self._storage_broker = self._nodemaker.storage_broker
@@ -620,7 +617,6 @@ class PublishMixin:
                          "Contents 3a",
                          "Contents 3b"]
         self._copied_shares = {}
-        num_peers = 20
         self._storage = FakeStorage()
         self._nodemaker = make_nodemaker(self._storage)
         d = self._nodemaker.create_mutable_file(self.CONTENTS[0]) # seqnum=1
@@ -757,7 +753,6 @@ class Servermap(unittest.TestCase, PublishMixin):
     def test_mark_bad(self):
         d = defer.succeed(None)
         ms = self.make_servermap
-        us = self.update_servermap
         d.addCallback(lambda res: ms(mode=MODE_READ))
         d.addCallback(lambda sm: self.failUnlessOneRecoverable(sm, 6))
@@ -1435,7 +1430,6 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
             self.failUnlessEqual(len(smap.recoverable_versions()), 1)
             self.failIf(smap.unrecoverable_versions())
             # now, which should have won?
-            roothash_s4a = self.get_roothash_for(3)
             expected_contents = self.CONTENTS[3]
             new_versionid = smap.best_recoverable_version()
             self.failUnlessEqual(new_versionid[0], 5) # seqnum 5
@@ -1586,7 +1580,6 @@ class MultipleEncodings(unittest.TestCase):
         sb = self._storage_broker
         for peerid in sorted(sb.get_all_serverids()):
-            peerid_s = shortnodeid_b2a(peerid)
             for shnum in self._shares1.get(peerid, {}):
                 if shnum < len(places):
                     which = places[shnum]
@@ -1596,7 +1589,6 @@ class MultipleEncodings(unittest.TestCase):
                     in_1 = shnum in self._shares1[peerid]
                     in_2 = shnum in self._shares2.get(peerid, {})
                     in_3 = shnum in self._shares3.get(peerid, {})
-                    #print peerid_s, shnum, which, in_1, in_2, in_3
                     if which == 1:
                         if in_1:
                             peers[shnum] = self._shares1[peerid][shnum]


@@ -253,7 +253,7 @@ class Server(unittest.TestCase):
         return ss
     def test_create(self):
-        ss = self.create("test_create")
+        self.create("test_create")
     def allocate(self, ss, storage_index, sharenums, size, canary=None):
         renew_secret = hashutil.tagged_hash("blah", "%d" % self._lease_secret.next())
@@ -326,7 +326,6 @@ class Server(unittest.TestCase):
         self.failUnlessEqual(ss.remote_get_buckets("allocate"), {})
-        canary = FakeCanary()
         already,writers = self.allocate(ss, "allocate", [0,1,2], 75)
         self.failUnlessEqual(already, set())
         self.failUnlessEqual(set(writers.keys()), set([0,1,2]))
@@ -387,7 +386,7 @@ class Server(unittest.TestCase):
         f.write(struct.pack(">L", 0)) # this is invalid: minimum used is v1
         f.close()
-        b = ss.remote_get_buckets("allocate")
+        ss.remote_get_buckets("allocate")
         e = self.failUnlessRaises(UnknownImmutableContainerVersionError,
                                   ss.remote_get_buckets, "si1")
@@ -654,7 +653,6 @@ class Server(unittest.TestCase):
         ss = StorageServer(workdir, "\x00" * 20, discard_storage=True)
         ss.setServiceParent(self.sparent)
-        canary = FakeCanary()
         already,writers = self.allocate(ss, "vid", [0,1,2], 75)
         self.failUnlessEqual(already, set())
         self.failUnlessEqual(set(writers.keys()), set([0,1,2]))
@@ -733,7 +731,7 @@ class MutableServer(unittest.TestCase):
         return ss
     def test_create(self):
-        ss = self.create("test_create")
+        self.create("test_create")
     def write_enabler(self, we_tag):
         return hashutil.tagged_hash("we_blah", we_tag)


@@ -465,7 +465,6 @@ class SystemTest(SystemTestMixin, unittest.TestCase):
             def _done(res):
                 log.msg("DONE: %s" % (res,))
                 self._mutable_node_1 = res
-                uri = res.get_uri()
             d1.addCallback(_done)
             return d1
         d.addCallback(_create_mutable)
@@ -1364,13 +1363,11 @@ class SystemTest(SystemTestMixin, unittest.TestCase):
         # network calls)
         private_uri = self._private_node.get_uri()
-        some_uri = self._root_directory_uri
         client0_basedir = self.getdir("client0")
         nodeargs = [
             "--node-directory", client0_basedir,
             ]
-        TESTDATA = "I will not write the same thing over and over.\n" * 100
         d = defer.succeed(None)


@@ -380,7 +380,6 @@ class Dirnode(unittest.TestCase):
         u0 = uri.LiteralFileURI("data")
         u1 = uri.LiteralDirectoryURI(u0)
         self.failUnless(str(u1))
-        u1s = u1.to_string()
         self.failUnlessEqual(u1.to_string(), "URI:DIR2-LIT:mrqxiyi")
         self.failUnless(u1.is_readonly())
         self.failIf(u1.is_mutable())


@@ -176,7 +176,7 @@ class Statistics(unittest.TestCase):
         try:
             func(*args, **kwargs)
             self.fail(msg)
-        except AssertionError, e:
+        except AssertionError:
             pass
     def failUnlessListEqual(self, a, b, msg = None):
@@ -320,9 +320,6 @@ class Asserts(unittest.TestCase):
         self.fail("assert was not caught")
     def should_not_assert(self, func, *args, **kwargs):
-        if "re" in kwargs:
-            regexp = kwargs["re"]
-            del kwargs["re"]
         try:
             func(*args, **kwargs)
         except AssertionError, e:


@@ -438,6 +438,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
         try:
             from allmydata import reliability
             _hush_pyflakes = reliability
+            del _hush_pyflakes
         except:
             raise unittest.SkipTest("reliability tool requires NumPy")
@@ -483,7 +484,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
         def _check_json(res):
             data = simplejson.loads(res)
             self.failUnless(isinstance(data, dict))
-            active = data["active"]
+            #active = data["active"]
             # TODO: test more. We need a way to fake an active operation
             # here.
         d.addCallback(_check_json)


@@ -529,7 +529,7 @@ def from_string(s):
 def is_uri(s):
     try:
-        uri = from_string(s)
+        from_string(s)
         return True
     except (TypeError, AssertionError):
         return False


@@ -247,7 +247,6 @@ def a2b_l(cs, lengthinbits):
     octets = []
     pos = 2048
     num = qs[0] * pos
-    readybits = 5
     i = 1
     while len(octets) < numoctets:
         while pos > 256:


@@ -33,7 +33,6 @@ def next_power_of_k(n, k):
         x = 0
     else:
         x = int(math.log(n, k) + 0.5)
-    r = k**x
     if k**x < n:
         return k**(x+1)
     else:


@@ -177,7 +177,6 @@ class ResultsBase:
     def _render_si_link(self, ctx, storage_index):
         si_s = base32.b2a(storage_index)
-        root = get_root(ctx)
         req = inevow.IRequest(ctx)
         ophandle = req.prepath[-1]
         target = "%s/operations/%s/%s" % (get_root(ctx), ophandle, si_s)


@@ -60,7 +60,6 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
         self.name = name
     def childFactory(self, ctx, name):
-        req = IRequest(ctx)
         name = name.decode("utf-8")
         if not name:
             raise EmptyPathnameComponentError()


@@ -355,7 +355,6 @@ class FileDownloader(rend.Page):
         if encoding:
             req.setHeader("content-encoding", encoding)
-        save_to_filename = None
         if boolean_of_arg(get_arg(req, "save", "False")):
             # tell the browser to save the file rather display it we don't
             # try to encode the filename, instead we echo back the exact same


@@ -53,7 +53,6 @@ class MoreInfo(rend.Page):
     def render_size(self, ctx, data):
         node = self.original
-        si = node.get_storage_index()
         d = node.get_current_size()
         def _no_size(size):
             if size is None:


@@ -1,5 +1,5 @@
-from nevow import rend, inevow, tags as T
+from nevow import rend, tags as T
 reliability = None # might not be usable
 try:
     from allmydata import reliability # requires NumPy
@@ -58,7 +58,6 @@ class ReliabilityTool(rend.Page):
         return "%d" % s
     def get_parameters(self, ctx):
-        req = inevow.IRequest(ctx)
         parameters = {}
         for (name,default,argtype,description) in self.DEFAULT_PARAMETERS:
             v = get_arg(ctx, name, default)


@@ -184,7 +184,6 @@ class StorageStatus(rend.Page):
         ecr = ec["space-recovered"]
         p = T.ul()
-        pieces = []
         def add(*pieces):
             p[T.li[pieces]]
@@ -243,7 +242,6 @@ class StorageStatus(rend.Page):
             ]
         p = T.ul()
-        pieces = []
         def add(*pieces):
             p[T.li[pieces]]