Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git

Commit 618db4867c (parent 11b18824c7)

Replace uses of os.path.abspath with abspath_expanduser_unicode where necessary. This makes basedir paths consistently represented as Unicode.
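
The change is mechanical, but the intent is worth spelling out: os.path.abspath returns whatever string type it is given (byte strings stay byte strings) and never expands "~", while the replacement helper pins every basedir to an absolute unicode path. A minimal Python 2 sketch of what an abspath_expanduser_unicode-style helper does — illustrative only, not the shipped implementation in allmydata.util.fileutil:

    # -*- coding: utf-8 -*-
    # Illustrative sketch of an abspath_expanduser_unicode-style helper.
    import os

    def abspath_expanduser_unicode_sketch(path):
        assert isinstance(path, unicode), "callers must pass unicode"
        path = os.path.expanduser(path)      # handle "~" and "~user"
        if not os.path.isabs(path):
            # os.getcwdu() returns the working directory as unicode
            path = os.path.join(os.getcwdu(), path)
        return os.path.normpath(path)

    print abspath_expanduser_unicode_sketch(u"~/.tahoe")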
@@ -11,6 +11,8 @@ from allmydata import get_package_versions, get_package_versions_string
 from allmydata.util import log
 from allmydata.util import fileutil, iputil, observer
 from allmydata.util.assertutil import precondition, _assert
+from allmydata.util.fileutil import abspath_expanduser_unicode
+from allmydata.util.encodingutil import get_filesystem_encoding
 
 # Add our application versions to the data that Foolscap's LogPublisher
 # reports.
@@ -49,9 +51,9 @@ class Node(service.MultiService):
     PORTNUMFILE = None
     CERTFILE = "node.pem"
 
-    def __init__(self, basedir="."):
+    def __init__(self, basedir=u"."):
         service.MultiService.__init__(self)
-        self.basedir = os.path.abspath(basedir)
+        self.basedir = abspath_expanduser_unicode(unicode(basedir))
         self._portnumfile = os.path.join(self.basedir, self.PORTNUMFILE)
         self._tub_ready_observerlist = observer.OneShotObserverList()
         fileutil.make_dirs(os.path.join(self.basedir, "private"), 0700)
@@ -74,12 +76,12 @@ class Node(service.MultiService):
         iputil.increase_rlimits()
 
     def init_tempdir(self):
-        local_tempdir = "tmp" # default is NODEDIR/tmp/
-        tempdir = self.get_config("node", "tempdir", local_tempdir)
+        local_tempdir_utf8 = "tmp" # default is NODEDIR/tmp/
+        tempdir = self.get_config("node", "tempdir", local_tempdir_utf8).decode('utf-8')
         tempdir = os.path.join(self.basedir, tempdir)
         if not os.path.exists(tempdir):
             fileutil.make_dirs(tempdir)
-        tempfile.tempdir = os.path.abspath(tempdir)
+        tempfile.tempdir = abspath_expanduser_unicode(tempdir)
         # this should cause twisted.web.http (which uses
         # tempfile.TemporaryFile) to put large request bodies in the given
         # directory. Without this, the default temp dir is usually /tmp/,
@@ -167,9 +169,9 @@ class Node(service.MultiService):
     def setup_ssh(self):
         ssh_port = self.get_config("node", "ssh.port", "")
         if ssh_port:
-            ssh_keyfile = self.get_config("node", "ssh.authorized_keys_file")
+            ssh_keyfile = self.get_config("node", "ssh.authorized_keys_file").decode('utf-8')
            from allmydata import manhole
-            m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile)
+            m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile.encode(get_filesystem_encoding()))
             m.setServiceParent(self)
             self.log("AuthorizedKeysManhole listening on %s" % ssh_port)
 
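The node hunks above follow a decode-early, encode-late convention: values read from tahoe.cfg are decoded from UTF-8 into unicode immediately, and re-encoded with the filesystem encoding only when handed to an API that requires byte strings (here, manhole.AuthorizedKeysManhole). A hedged sketch of that round trip, with sys.getfilesystemencoding standing in for allmydata's get_filesystem_encoding:

    # -*- coding: utf-8 -*-
    # Python 2 sketch of the decode-early / encode-late convention.
    import sys

    raw = "~/keys/authorized_keys"        # config files hold UTF-8 bytes
    keyfile = raw.decode('utf-8')         # decode early: work in unicode

    def bytes_only_api(path_bytes):
        # stand-in for an API that, like the manhole, wants byte strings
        assert isinstance(path_bytes, str)
        return path_bytes

    # encode late: only at the OS/API boundary
    bytes_only_api(keyfile.encode(sys.getfilesystemencoding() or 'utf-8'))
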
@@ -9,6 +9,8 @@ import os.path, sys, time, random, stat
 from allmydata.util.netstring import netstring
 from allmydata.util.hashutil import backupdb_dirhash
 from allmydata.util import base32
+from allmydata.util.fileutil import abspath_expanduser_unicode
+from allmydata.util.encodingutil import to_str
 
 DAY = 24*60*60
 MONTH = 30*DAY
@@ -203,7 +205,7 @@ class BackupDB_v2:
         current working directory. The database stores absolute pathnames.
         """
 
-        path = os.path.abspath(path)
+        path = abspath_expanduser_unicode(path)
         s = os.stat(path)
         size = s[stat.ST_SIZE]
         ctime = s[stat.ST_CTIME]
@@ -246,7 +248,7 @@ class BackupDB_v2:
         probability = min(max(probability, 0.0), 1.0)
         should_check = bool(random.random() < probability)
 
-        return FileResult(self, str(filecap), should_check,
+        return FileResult(self, to_str(filecap), should_check,
                           path, mtime, ctime, size)
 
     def get_or_allocate_fileid_for_cap(self, filecap):
@@ -349,7 +351,7 @@ class BackupDB_v2:
         probability = min(max(probability, 0.0), 1.0)
         should_check = bool(random.random() < probability)
 
-        return DirectoryResult(self, dirhash_s, str(dircap), should_check)
+        return DirectoryResult(self, dirhash_s, to_str(dircap), should_check)
 
     def did_create_directory(self, dircap, dirhash):
         now = time.time()
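str(filecap) would raise UnicodeEncodeError if the cap ever arrived as non-ASCII unicode (the sqlite3 layer hands text back as unicode), so the backupdb hunks switch to to_str. Assuming to_str follows the usual tahoe-lafs convention — pass None and byte strings through, UTF-8-encode unicode — a sketch:

    # Sketch of a to_str-style coercion (Python 2); an assumption based on
    # how it is used in these hunks, not the shipped implementation.
    def to_str_sketch(s):
        if s is None or isinstance(s, str):
            return s
        return s.encode('utf-8')

    assert to_str_sketch(None) is None
    assert to_str_sketch("URI:CHK:x") == "URI:CHK:x"
    assert to_str_sketch(u"URI:CHK:x") == "URI:CHK:x"
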
@@ -85,9 +85,12 @@ def write_node_config(c, config):
 
 
 def create_node(basedir, config, out=sys.stdout, err=sys.stderr):
+    # This should always be called with an absolute Unicode basedir.
+    precondition(isinstance(basedir, unicode), basedir)
+
     if os.path.exists(basedir):
-        if os.listdir(basedir):
-            print >>err, "The base directory \"%s\", which is \"%s\" is not empty." % (basedir, os.path.abspath(basedir))
+        if listdir_unicode(basedir):
+            print >>err, "The base directory %s is not empty." % quote_output(basedir)
             print >>err, "To avoid clobbering anything, I am going to quit now."
             print >>err, "Please use a different directory, or empty this one."
             return -1
@@ -130,7 +133,7 @@ def create_node(basedir, config, out=sys.stdout, err=sys.stderr):
 
     from allmydata.util import fileutil
     fileutil.make_dirs(os.path.join(basedir, "private"), 0700)
-    print >>out, "Node created in %s" % basedir
+    print >>out, "Node created in %s" % quote_output(basedir)
     if not config.get("introducer", ""):
         print >>out, " Please set [client]introducer.furl= in tahoe.cfg!"
         print >>out, " The node cannot connect to a grid without it."
@@ -144,9 +147,12 @@ def create_client(basedir, config, out=sys.stdout, err=sys.stderr):
 
 
 def create_introducer(basedir, config, out=sys.stdout, err=sys.stderr):
+    # This should always be called with an absolute Unicode basedir.
+    precondition(isinstance(basedir, unicode), basedir)
+
     if os.path.exists(basedir):
-        if os.listdir(basedir):
-            print >>err, "The base directory \"%s\", which is \"%s\" is not empty." % (basedir, os.path.abspath(basedir))
+        if listdir_unicode(basedir):
+            print >>err, "The base directory %s is not empty." % quote_output(basedir)
             print >>err, "To avoid clobbering anything, I am going to quit now."
             print >>err, "Please use a different directory, or empty this one."
             return -1
@@ -161,7 +167,7 @@ def create_introducer(basedir, config, out=sys.stdout, err=sys.stderr):
     write_node_config(c, config)
     c.close()
 
-    print >>out, "Introducer created in %s" % basedir
+    print >>out, "Introducer created in %s" % quote_output(basedir)
 
 
 subCommands = [
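create_node and create_introducer now state their contract at the top instead of discovering a byte-string basedir later. A minimal sketch of how a precondition-style guard enforces the Unicode-basedir invariant (the error format is illustrative; the real helper is allmydata.util.assertutil.precondition):

    # Python 2 sketch of a precondition-style guard like the one added above.
    def precondition_sketch(expr, *args):
        if not expr:
            raise AssertionError("precondition failed: %r" % (args,))

    def create_node_sketch(basedir):
        # the contract adopted by this commit: absolute unicode basedirs
        precondition_sketch(isinstance(basedir, unicode), basedir)
        return basedir

    create_node_sketch(u"/home/user/.tahoe")
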
@@ -5,6 +5,7 @@ import struct, time, os
 from twisted.python import usage, failure
 from twisted.internet import defer
 
 
 class DumpOptions(usage.Options):
     def getSynopsis(self):
         return "Usage: tahoe debug dump-share SHARE_FILENAME"
@@ -28,15 +29,17 @@ verify-cap for the file that uses the share.
         return t
 
     def parseArgs(self, filename):
-        self['filename'] = filename
+        from allmydata.util.encodingutil import argv_to_abspath
+        self['filename'] = argv_to_abspath(filename)
 
 def dump_share(options):
     from allmydata.storage.mutable import MutableShareFile
+    from allmydata.util.encodingutil import quote_output
 
     out = options.stdout
 
     # check the version, to see if we have a mutable or immutable share
-    print >>out, "share filename: %s" % options['filename']
+    print >>out, "share filename: %s" % quote_output(options['filename'])
 
     f = open(options['filename'], "rb")
     prefix = f.read(32)
@@ -61,6 +64,8 @@ def dump_immutable_chk_share(f, out, options):
     from allmydata import uri
     from allmydata.util import base32
     from allmydata.immutable.layout import ReadBucketProxy
+    from allmydata.util.encodingutil import quote_output, to_str
 
     # use a ReadBucketProxy to parse the bucket and find the uri extension
     bp = ReadBucketProxy(None, '', '')
     offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
@@ -104,14 +109,16 @@ def dump_immutable_chk_share(f, out, options):
     # the storage index isn't stored in the share itself, so we depend upon
     # knowing the parent directory name to get it
     pieces = options['filename'].split(os.sep)
-    if len(pieces) >= 2 and base32.could_be_base32_encoded(pieces[-2]):
-        storage_index = base32.a2b(pieces[-2])
-        uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
-        u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
-                                   unpacked["needed_shares"],
-                                   unpacked["total_shares"], unpacked["size"])
-        verify_cap = u.to_string()
-        print >>out, "%20s: %s" % ("verify-cap", verify_cap)
+    if len(pieces) >= 2:
+        piece = to_str(pieces[-2])
+        if base32.could_be_base32_encoded(piece):
+            storage_index = base32.a2b(piece)
+            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
+            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
+                                       unpacked["needed_shares"],
+                                       unpacked["total_shares"], unpacked["size"])
+            verify_cap = u.to_string()
+            print >>out, "%20s: %s" % ("verify-cap", quote_output(verify_cap, quotemarks=False))
 
     sizes = {}
     sizes['data'] = (offsets['plaintext_hash_tree'] -
@@ -210,6 +217,7 @@ def dump_SDMF_share(m, length, options):
     from allmydata.mutable.common import NeedMoreDataError
     from allmydata.util import base32, hashutil
    from allmydata.uri import SSKVerifierURI
+    from allmydata.util.encodingutil import quote_output, to_str
 
     offset = m.DATA_OFFSET
 
@@ -256,12 +264,14 @@ def dump_SDMF_share(m, length, options):
     # the storage index isn't stored in the share itself, so we depend upon
     # knowing the parent directory name to get it
     pieces = options['filename'].split(os.sep)
-    if len(pieces) >= 2 and base32.could_be_base32_encoded(pieces[-2]):
-        storage_index = base32.a2b(pieces[-2])
-        fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
-        u = SSKVerifierURI(storage_index, fingerprint)
-        verify_cap = u.to_string()
-        print >>out, " verify-cap:", verify_cap
+    if len(pieces) >= 2:
+        piece = to_str(pieces[-2])
+        if base32.could_be_base32_encoded(piece):
+            storage_index = base32.a2b(piece)
+            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
+            u = SSKVerifierURI(storage_index, fingerprint)
+            verify_cap = u.to_string()
+            print >>out, " verify-cap:", quote_output(verify_cap, quotemarks=False)
 
     if options['offsets']:
         # NOTE: this offset-calculation code is fragile, and needs to be
@@ -380,6 +390,7 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
     from allmydata import uri
     from allmydata.storage.server import si_b2a
     from allmydata.util import base32, hashutil
+    from allmydata.util.encodingutil import quote_output
 
     if isinstance(u, uri.CHKFileURI):
         if show_header:
@@ -401,7 +412,7 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
     elif isinstance(u, uri.LiteralFileURI):
         if show_header:
             print >>out, "Literal File URI:"
-        print >>out, " data:", u.data
+        print >>out, " data:", quote_output(u.data)
 
     elif isinstance(u, uri.WriteableSSKFileURI):
         if show_header:
@@ -447,9 +458,12 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
 class FindSharesOptions(usage.Options):
     def getSynopsis(self):
         return "Usage: tahoe debug find-shares STORAGE_INDEX NODEDIRS.."
 
     def parseArgs(self, storage_index_s, *nodedirs):
+        from allmydata.util.encodingutil import argv_to_abspath
         self.si_s = storage_index_s
-        self.nodedirs = nodedirs
+        self.nodedirs = map(argv_to_abspath, nodedirs)
 
     def getUsage(self, width=None):
         t = usage.Options.getUsage(self, width)
         t += """
@@ -478,13 +492,14 @@ def find_shares(options):
     /home/warner/testnet/node-2/storage/shares/44k/44kai1tui348689nrw8fjegc8c/2
     """
     from allmydata.storage.server import si_a2b, storage_index_to_dir
+    from allmydata.util.encodingutil import listdir_unicode
 
     out = options.stdout
     sharedir = storage_index_to_dir(si_a2b(options.si_s))
     for d in options.nodedirs:
-        d = os.path.join(os.path.expanduser(d), "storage/shares", sharedir)
+        d = os.path.join(d, "storage/shares", sharedir)
         if os.path.exists(d):
-            for shnum in os.listdir(d):
+            for shnum in listdir_unicode(d):
                 print >>out, os.path.join(d, shnum)
 
     return 0
@@ -495,7 +510,8 @@ class CatalogSharesOptions(usage.Options):
 
     """
     def parseArgs(self, *nodedirs):
-        self.nodedirs = nodedirs
+        from allmydata.util.encodingutil import argv_to_abspath
+        self.nodedirs = map(argv_to_abspath, nodedirs)
         if not nodedirs:
             raise usage.UsageError("must specify at least one node directory")
 
@@ -543,6 +559,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
     from allmydata.mutable.common import NeedMoreDataError
     from allmydata.immutable.layout import ReadBucketProxy
     from allmydata.util import base32
+    from allmydata.util.encodingutil import quote_output
     import struct
 
     f = open(abs_sharefile, "rb")
@@ -582,9 +599,9 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
             print >>out, "SDMF %s %d/%d %d #%d:%s %d %s" % \
                   (si_s, k, N, datalen,
                    seqnum, base32.b2a(root_hash),
-                   expiration, abs_sharefile)
+                   expiration, quote_output(abs_sharefile))
         else:
-            print >>out, "UNKNOWN mutable %s" % (abs_sharefile,)
+            print >>out, "UNKNOWN mutable %s" % quote_output(abs_sharefile)
 
     elif struct.unpack(">L", prefix[:4]) == (1,):
         # immutable
@@ -616,21 +633,23 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
 
         print >>out, "CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
                                                    ueb_hash, expiration,
-                                                   abs_sharefile)
+                                                   quote_output(abs_sharefile))
 
     else:
-        print >>out, "UNKNOWN really-unknown %s" % (abs_sharefile,)
+        print >>out, "UNKNOWN really-unknown %s" % quote_output(abs_sharefile)
 
     f.close()
 
 def catalog_shares(options):
+    from allmydata.util.encodingutil import listdir_unicode, quote_output
+
     out = options.stdout
     err = options.stderr
     now = time.time()
     for d in options.nodedirs:
-        d = os.path.join(os.path.expanduser(d), "storage/shares")
+        d = os.path.join(d, "storage/shares")
         try:
-            abbrevs = os.listdir(d)
+            abbrevs = listdir_unicode(d)
         except EnvironmentError:
             # ignore nodes that have storage turned off altogether
             pass
|
||||
continue
|
||||
abbrevdir = os.path.join(d, abbrevdir)
|
||||
# this tool may get run against bad disks, so we can't assume
|
||||
# that os.listdir will always succeed. Try to catalog as much
|
||||
# that listdir_unicode will always succeed. Try to catalog as much
|
||||
# as possible.
|
||||
try:
|
||||
sharedirs = os.listdir(abbrevdir)
|
||||
sharedirs = listdir_unicode(abbrevdir)
|
||||
for si_s in sharedirs:
|
||||
si_dir = os.path.join(abbrevdir, si_s)
|
||||
catalog_shares_one_abbrevdir(si_s, si_dir, now, out,err)
|
||||
except:
|
||||
print >>err, "Error processing %s" % abbrevdir
|
||||
print >>err, "Error processing %s" % quote_output(abbrevdir)
|
||||
failure.Failure().printTraceback(err)
|
||||
|
||||
return 0
|
||||
|
||||
def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
|
||||
from allmydata.util.encodingutil import listdir_unicode, quote_output
|
||||
|
||||
try:
|
||||
for shnum_s in os.listdir(si_dir):
|
||||
for shnum_s in listdir_unicode(si_dir):
|
||||
abs_sharefile = os.path.join(si_dir, shnum_s)
|
||||
abs_sharefile = os.path.abspath(abs_sharefile)
|
||||
assert os.path.isfile(abs_sharefile)
|
||||
try:
|
||||
describe_share(abs_sharefile, si_s, shnum_s, now,
|
||||
out)
|
||||
except:
|
||||
print >>err, "Error processing %s" % abs_sharefile
|
||||
print >>err, "Error processing %s" % quote_output(abs_sharefile)
|
||||
failure.Failure().printTraceback(err)
|
||||
except:
|
||||
print >>err, "Error processing %s" % si_dir
|
||||
print >>err, "Error processing %s" % quote_output(si_dir)
|
||||
failure.Failure().printTraceback(err)
|
||||
|
||||
class CorruptShareOptions(usage.Options):
|
||||
|
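The debug-tool hunks lean on two helpers: argv_to_abspath (decode a command-line byte string, then absolutize it as unicode) and listdir_unicode (directory listings as unicode names). A sketch of both, assuming they decode with the platform encodings; the real implementations live in allmydata.util.encodingutil:

    # -*- coding: utf-8 -*-
    # Python 2 sketches of the helpers assumed by the hunks above.
    import os, sys

    def listdir_unicode_sketch(path):
        # on Python 2, os.listdir returns unicode names for a unicode path
        assert isinstance(path, unicode)
        return os.listdir(path)

    def argv_to_abspath_sketch(arg):
        # command-line arguments arrive as bytes; decode, expand, absolutize
        arg = arg.decode(sys.getfilesystemencoding() or 'utf-8')
        return os.path.normpath(os.path.join(os.getcwdu(),
                                             os.path.expanduser(arg)))

    print listdir_unicode_sketch(u".")
    print argv_to_abspath_sketch("~/storage")
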
@@ -29,8 +29,8 @@ def create_key_generator(config, out=sys.stdout, err=sys.stderr):
        print >>err, "a basedir was not provided, please use --basedir or -C"
        return -1
     if os.path.exists(basedir):
-        if os.listdir(basedir):
-            print >>err, "The base directory \"%s\", which is \"%s\" is not empty." % (basedir, os.path.abspath(basedir))
+        if listdir_unicode(basedir):
+            print >>err, "The base directory %s is not empty." % quote_output(basedir)
             print >>err, "To avoid clobbering anything, I am going to quit now."
             print >>err, "Please use a different directory, or empty this one."
             return -1
@@ -11,6 +11,7 @@ from allmydata.util import time_format
 from allmydata.scripts import backupdb
 from allmydata.util.encodingutil import listdir_unicode, quote_output, to_str, FilenameEncodingError
 from allmydata.util.assertutil import precondition
+from allmydata.util.fileutil import abspath_expanduser_unicode
 
 
 def get_local_metadata(path):
@@ -84,7 +85,7 @@ class BackerUpper:
         self.backupdb = None
         bdbfile = os.path.join(options["node-directory"],
                                "private", "backupdb.sqlite")
-        bdbfile = os.path.abspath(bdbfile)
+        bdbfile = abspath_expanduser_unicode(bdbfile)
         self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
         if not self.backupdb:
             print >>stderr, "ERROR: Unable to load backup db."
@@ -9,6 +9,7 @@ from allmydata.scripts.common import get_alias, escape_path, \
 from allmydata.scripts.common_http import do_http, HTTPError
 from allmydata import uri
 from allmydata.util import fileutil
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, to_str
 from allmydata.util.assertutil import precondition
 
@@ -511,7 +512,7 @@ class Copier:
        rootcap, path = get_alias(self.aliases, destination_spec, None)
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
-            pathname = os.path.abspath(os.path.expanduser(path.decode('utf-8')))
+            pathname = abspath_expanduser_unicode(path.decode('utf-8'))
            if not os.path.exists(pathname):
                t = LocalMissingTarget(pathname)
            elif os.path.isdir(pathname):
|
||||
rootcap, path = get_alias(self.aliases, source_spec, None)
|
||||
if rootcap == DefaultAliasMarker:
|
||||
# no alias, so this is a local file
|
||||
pathname = os.path.abspath(os.path.expanduser(path.decode('utf-8')))
|
||||
pathname = abspath_expanduser_unicode(path.decode('utf-8'))
|
||||
name = os.path.basename(pathname)
|
||||
if not os.path.exists(pathname):
|
||||
raise MissingSourceError(source_spec)
|
||||
|
@@ -65,8 +65,9 @@ class GridTesterOptions(usage.Options):
     ]
 
     def parseArgs(self, nodedir, tahoe):
-        self.nodedir = nodedir
-        self.tahoe = os.path.abspath(tahoe)
+        # Note: does not support Unicode arguments.
+        self.nodedir = os.path.expanduser(nodedir)
+        self.tahoe = os.path.abspath(os.path.expanduser(tahoe))
 
 class CommandFailed(Exception):
     pass
@@ -9,6 +9,8 @@ from allmydata import client, introducer
 from allmydata.immutable import upload
 from allmydata.scripts import create_node
 from allmydata.util import fileutil, pollmixin
+from allmydata.util.fileutil import abspath_expanduser_unicode
+from allmydata.util.encodingutil import get_filesystem_encoding
 from foolscap.api import Tub, fireEventually, flushEventualQueue
 from twisted.python import log
 
@@ -63,8 +65,8 @@ class SystemFramework(pollmixin.PollMixin):
     numnodes = 7
 
     def __init__(self, basedir, mode):
-        self.basedir = basedir = os.path.abspath(basedir)
-        if not basedir.startswith(os.path.abspath(".")):
+        self.basedir = basedir = abspath_expanduser_unicode(unicode(basedir))
+        if not (basedir + os.path.sep).startswith(abspath_expanduser_unicode(u".") + os.path.sep):
             raise AssertionError("safety issue: basedir must be a subdir")
         self.testdir = testdir = os.path.join(basedir, "test")
         if os.path.exists(testdir):
@@ -226,7 +228,9 @@ this file are ignored.
     def start_client(self):
         # this returns a Deferred that fires with the client's control.furl
         log.msg("MAKING CLIENT")
-        clientdir = self.clientdir = os.path.join(self.testdir, "client")
+        # self.testdir is an absolute Unicode path
+        clientdir = self.clientdir = os.path.join(self.testdir, u"client")
+        clientdir_str = clientdir.encode(get_filesystem_encoding())
         quiet = StringIO()
         create_node.create_node(clientdir, {}, out=quiet)
         log.msg("DONE MAKING CLIENT")
@@ -265,7 +269,7 @@ this file are ignored.
         logfile = os.path.join(self.basedir, "client.log")
         cmd = ["twistd", "-n", "-y", "tahoe-client.tac", "-l", logfile]
         env = os.environ.copy()
-        self.proc = reactor.spawnProcess(pp, cmd[0], cmd, env, path=clientdir)
+        self.proc = reactor.spawnProcess(pp, cmd[0], cmd, env, path=clientdir_str)
         log.msg("CLIENT STARTED")
 
         # now we wait for the client to get started. we're looking for the
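reactor.spawnProcess ultimately passes its path argument to the OS as bytes, which is why the hunk above keeps clientdir as unicode for tahoe code but creates a clientdir_str twin for Twisted. A sketch of the split:

    # Python 2 sketch: keep one unicode path for application logic and one
    # byte-string twin, encoded with the filesystem encoding, for OS-level
    # APIs such as reactor.spawnProcess(..., path=...).
    import sys

    clientdir = u"/tmp/check_memory/test/client"
    clientdir_str = clientdir.encode(sys.getfilesystemencoding() or 'utf-8')
    assert isinstance(clientdir_str, str)
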
@@ -33,7 +33,8 @@ from twisted.python import usage
 from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
     quote_output, get_output_encoding, get_argv_encoding, get_filesystem_encoding, \
-    unicode_to_output, to_str
+    unicode_to_output, to_str, to_argv
+from allmydata.util.fileutil import abspath_expanduser_unicode
 
 timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
 
@@ -181,7 +182,7 @@ class CLI(CLITestMixin, unittest.TestCase):
         u = uri.LiteralFileURI("this is some data")
         output = self._dump_cap(u.to_string())
         self.failUnless("Literal File URI:" in output, output)
-        self.failUnless("data: this is some data" in output, output)
+        self.failUnless("data: 'this is some data'" in output, output)
 
     def test_dump_cap_ssk(self):
         writekey = "\x01" * 16
@@ -774,7 +775,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.set_up_grid()
 
         rel_fn = os.path.join(self.basedir, "DATAFILE")
-        abs_fn = os.path.abspath(rel_fn)
+        abs_fn = to_argv(abspath_expanduser_unicode(unicode(rel_fn)))
         # we make the file small enough to fit in a LIT file, for speed
         fileutil.write(rel_fn, "short file")
         d = self.do_cli("put", rel_fn)
@@ -259,10 +259,10 @@ class StdlibUnicode(unittest.TestCase):
             raise unittest.SkipTest("%r\nIt is possible that the filesystem on which this test is being run "
                                     "does not support Unicode, even though the platform does." % (e,))
 
-        fn = lumiere_nfc + '/' + lumiere_nfc + '.txt'
+        fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
         open(fn, 'wb').close()
         self.failUnless(os.path.exists(fn))
-        self.failUnless(os.path.exists(os.path.abspath(fn)))
+        self.failUnless(os.path.exists(os.path.join(os.getcwdu(), fn)))
         filenames = listdir_unicode(lumiere_nfc)
 
         # We only require that the listing includes a filename that is canonically equivalent
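The listing assertion in this test is deliberately loose because some filesystems (notably HFS+) hand back decomposed (NFD) filenames where composed (NFC) names were written. A short sketch of canonical-equivalence checking with the standard unicodedata module:

    # -*- coding: utf-8 -*-
    # Compare filenames up to Unicode canonical equivalence (Python 2).
    import unicodedata

    lumiere_nfc = u"lumi\u00e8re"                    # precomposed form (NFC)
    lumiere_nfd = unicodedata.normalize('NFD', lumiere_nfc)

    assert lumiere_nfc != lumiere_nfd                # different code points
    assert unicodedata.normalize('NFC', lumiere_nfd) == lumiere_nfc
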
@@ -192,7 +192,7 @@ class CreateNode(unittest.TestCase):
         rc, out, err = self.run_tahoe(argv)
         self.failIfEqual(rc, 0, str((out, err, rc)))
         self.failUnlessEqual(out, "")
-        self.failUnless("is not empty." in err)
+        self.failUnlessIn("is not empty.", err)
 
         # make sure it rejects too many arguments
         argv = ["create-key-generator", "basedir", "extraarg"]
@@ -12,6 +12,8 @@ from allmydata.immutable import offloaded, upload
 from allmydata.immutable.filenode import ImmutableFileNode, LiteralFileNode
 from allmydata.util import idlib, mathutil
 from allmydata.util import log, base32
+from allmydata.util.encodingutil import quote_output
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.consumer import MemoryConsumer, download_to_data
 from allmydata.scripts import runner
 from allmydata.interfaces import IDirectoryNode, IFileNode, \
@@ -1285,9 +1287,9 @@ class SystemTest(SystemTestMixin, unittest.TestCase):
             if magic == '\x00\x00\x00\x01':
                 break
         else:
-            self.fail("unable to find any uri_extension files in %s"
+            self.fail("unable to find any uri_extension files in %r"
                       % self.basedir)
-        log.msg("test_system.SystemTest._test_runner using %s" % filename)
+        log.msg("test_system.SystemTest._test_runner using %r" % filename)
 
         out,err = StringIO(), StringIO()
         rc = runner.runner(["debug", "dump-share", "--offsets",
@@ -1298,12 +1300,12 @@ class SystemTest(SystemTestMixin, unittest.TestCase):
 
         # we only upload a single file, so we can assert some things about
         # its size and shares.
-        self.failUnless(("share filename: %s" % filename) in output)
-        self.failUnless("size: %d\n" % len(self.data) in output)
-        self.failUnless("num_segments: 1\n" in output)
+        self.failUnlessIn("share filename: %s" % quote_output(abspath_expanduser_unicode(filename)), output)
+        self.failUnlessIn("size: %d\n" % len(self.data), output)
+        self.failUnlessIn("num_segments: 1\n", output)
         # segment_size is always a multiple of needed_shares
-        self.failUnless("segment_size: %d\n" % mathutil.next_multiple(len(self.data), 3) in output)
-        self.failUnless("total_shares: 10\n" in output)
+        self.failUnlessIn("segment_size: %d\n" % mathutil.next_multiple(len(self.data), 3), output)
+        self.failUnlessIn("total_shares: 10\n", output)
         # keys which are supposed to be present
         for key in ("size", "num_segments", "segment_size",
                     "needed_shares", "total_shares",
@@ -1311,8 +1313,8 @@ class SystemTest(SystemTestMixin, unittest.TestCase):
                     #"plaintext_hash", "plaintext_root_hash",
                     "crypttext_hash", "crypttext_root_hash",
                     "share_root_hash", "UEB_hash"):
-            self.failUnless("%s: " % key in output, key)
-        self.failUnless(" verify-cap: URI:CHK-Verifier:" in output)
+            self.failUnlessIn("%s: " % key, output)
+        self.failUnlessIn(" verify-cap: URI:CHK-Verifier:", output)
 
         # now use its storage index to find the other shares using the
         # 'find-shares' tool