mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Merge branch 'pr138': fix long paths on windows
commit 3afe827ad0
@@ -16,6 +16,7 @@ from allmydata.control import ControlServer
from allmydata.introducer.client import IntroducerClient
from allmydata.util import hashutil, base32, pollmixin, log, keyutil, idlib
from allmydata.util.encodingutil import get_filesystem_encoding
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.abbreviate import parse_abbreviated_size
from allmydata.util.time_format import parse_duration, parse_date
from allmydata.stats import StatsProvider

@@ -450,8 +451,8 @@ class Client(node.Node, pollmixin.PollMixin):
from allmydata.webish import WebishServer
nodeurl_path = os.path.join(self.basedir, "node.url")
staticdir = self.get_config("node", "web.static", "public_html")
staticdir = os.path.expanduser(staticdir)
staticdir_config = self.get_config("node", "web.static", "public_html").decode("utf-8")
staticdir = abspath_expanduser_unicode(staticdir_config, base=self.basedir)
ws = WebishServer(self, webport, nodeurl_path, staticdir)
self.add_service(ws)

@@ -1,4 +1,5 @@
import os
from zope.interface import implements
from twisted.web.client import getPage
from twisted.internet import defer

@@ -7,6 +8,8 @@ from twisted.conch import error as conch_error
from twisted.conch.ssh import keys
from allmydata.util import base32
from allmydata.util.fileutil import abspath_expanduser_unicode
class NeedRootcapLookupScheme(Exception):
"""Accountname+Password-based access schemes require some kind of

@@ -28,7 +31,7 @@ class AccountFileChecker:
self.passwords = {}
self.pubkeys = {}
self.rootcaps = {}
for line in open(os.path.expanduser(accountfile), "r"):
for line in open(abspath_expanduser_unicode(accountfile), "r"):
line = line.strip()
if line.startswith("#") or not line:
continue

@@ -1,5 +1,5 @@
import os, sys
import sys
from twisted.internet import defer
from twisted.python.filepath import FilePath

@@ -9,6 +10,7 @@ from foolscap.api import eventually
from allmydata.interfaces import IDirectoryNode
from allmydata.util.encodingutil import quote_output, get_filesystem_encoding
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.immutable.upload import FileName

@@ -19,7 +20,7 @@ class DropUploader(service.MultiService):
service.MultiService.__init__(self)
try:
local_dir_u = os.path.expanduser(local_dir_utf8.decode('utf-8'))
local_dir_u = abspath_expanduser_unicode(local_dir_utf8.decode('utf-8'))
if sys.platform == "win32":
local_dir = local_dir_u
else:

@@ -6,7 +6,7 @@ from foolscap.api import Referenceable
import allmydata
from allmydata import node
from allmydata.util import log, rrefutil
from allmydata.util.encodingutil import get_filesystem_encoding
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.introducer.interfaces import \
RIIntroducerPublisherAndSubscriberService_v2
from allmydata.introducer.common import convert_announcement_v1_to_v2, \

@@ -21,7 +21,7 @@ class IntroducerNode(node.Node):
NODETYPE = "introducer"
GENERATED_FILES = ['introducer.furl']
def __init__(self, basedir="."):
def __init__(self, basedir=u"."):
node.Node.__init__(self, basedir)
self.read_config()
self.init_introducer()

@@ -33,8 +33,8 @@ class IntroducerNode(node.Node):
introducerservice = IntroducerService(self.basedir)
self.add_service(introducerservice)
old_public_fn = os.path.join(self.basedir, "introducer.furl").encode(get_filesystem_encoding())
private_fn = os.path.join(self.basedir, "private", "introducer.furl").encode(get_filesystem_encoding())
old_public_fn = os.path.join(self.basedir, u"introducer.furl")
private_fn = os.path.join(self.basedir, u"private", u"introducer.furl")
if os.path.exists(old_public_fn):
if os.path.exists(private_fn):

@@ -62,9 +62,9 @@ class IntroducerNode(node.Node):
self.log("init_web(webport=%s)", args=(webport,), umid="2bUygA")
from allmydata.webish import IntroducerWebishServer
nodeurl_path = os.path.join(self.basedir, "node.url")
staticdir = self.get_config("node", "web.static", "public_html")
staticdir = os.path.expanduser(staticdir)
nodeurl_path = os.path.join(self.basedir, u"node.url")
config_staticdir = self.get_config("node", "web.static", "public_html").decode('utf-8')
staticdir = abspath_expanduser_unicode(config_staticdir, base=self.basedir)
ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir)
self.add_service(ws)

@@ -1,8 +1,8 @@
# this is adapted from my code in Buildbot -warner
import os.path
import binascii, base64
from twisted.python import log
from twisted.application import service, strports
from twisted.cred import checkers, portal

@@ -12,6 +12,8 @@ from twisted.internet import protocol
from zope.interface import implements
from allmydata.util.fileutil import precondition_abspath
# makeTelnetProtocol and _TelnetRealm are for the TelnetManhole
class makeTelnetProtocol:

@@ -63,7 +65,8 @@ class AuthorizedKeysChecker(conchc.SSHPublicKeyDatabase):
"""
def __init__(self, authorized_keys_file):
self.authorized_keys_file = os.path.expanduser(authorized_keys_file)
precondition_abspath(authorized_keys_file)
self.authorized_keys_file = authorized_keys_file
def checkKey(self, credentials):
f = open(self.authorized_keys_file)

@@ -244,14 +247,12 @@ class AuthorizedKeysManhole(_BaseManhole):
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@param keyfile: the name of a file (relative to the buildmaster's
basedir) that contains SSH public keys of authorized
users, one per line. This is the exact same format
as used by sshd in ~/.ssh/authorized_keys .
@param keyfile: the path of a file that contains SSH public keys of
authorized users, one per line. This is the exact
same format as used by sshd in ~/.ssh/authorized_keys .
The path should be absolute.
"""
# TODO: expanduser this, and make it relative to the buildmaster's
# basedir
self.keyfile = keyfile
c = AuthorizedKeysChecker(keyfile)
_BaseManhole.__init__(self, port, c)
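
The hunk above changes the contract of AuthorizedKeysManhole: the keyfile must now arrive as an already-normalized absolute Unicode path rather than being expanduser'd inside the class. A minimal caller-side sketch (not part of the diff; the port string and key location are invented values):

    from allmydata.util.fileutil import abspath_expanduser_unicode
    from allmydata import manhole

    # Resolve the configured key file up front, as node.py now does below.
    keyfile = abspath_expanduser_unicode(u"~/.ssh/authorized_keys")
    m = manhole.AuthorizedKeysManhole("tcp:8022:interface=127.0.0.1", keyfile)
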
@@ -93,12 +93,11 @@ class Node(service.MultiService):
iputil.increase_rlimits()
def init_tempdir(self):
local_tempdir_utf8 = "tmp" # default is NODEDIR/tmp/
tempdir = self.get_config("node", "tempdir", local_tempdir_utf8).decode('utf-8')
tempdir = os.path.join(self.basedir, tempdir)
tempdir_config = self.get_config("node", "tempdir", "tmp").decode('utf-8')
tempdir = abspath_expanduser_unicode(tempdir_config, base=self.basedir)
if not os.path.exists(tempdir):
fileutil.make_dirs(tempdir)
tempfile.tempdir = abspath_expanduser_unicode(tempdir)
tempfile.tempdir = tempdir
# this should cause twisted.web.http (which uses
# tempfile.TemporaryFile) to put large request bodies in the given
# directory. Without this, the default temp dir is usually /tmp/,

@@ -220,11 +219,12 @@ class Node(service.MultiService):
def setup_ssh(self):
ssh_port = self.get_config("node", "ssh.port", "")
if ssh_port:
ssh_keyfile = self.get_config("node", "ssh.authorized_keys_file").decode('utf-8')
ssh_keyfile_config = self.get_config("node", "ssh.authorized_keys_file").decode('utf-8')
ssh_keyfile = abspath_expanduser_unicode(ssh_keyfile_config, base=self.basedir)
from allmydata import manhole
m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile.encode(get_filesystem_encoding()))
m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile)
m.setServiceParent(self)
self.log("AuthorizedKeysManhole listening on %s" % ssh_port)
self.log("AuthorizedKeysManhole listening on %s" % (ssh_port,))
def get_app_versions(self):
# TODO: merge this with allmydata.get_package_versions
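
The same three-step pattern (read the config value, decode it to Unicode, resolve it against the node's base directory) recurs in client.py, introducer/server.py, and node.py above. A rough, self-contained illustration with invented values, not taken from the diff:

    from allmydata.util.fileutil import abspath_expanduser_unicode

    basedir = abspath_expanduser_unicode(u"~/.tahoe")        # already absolute
    tempdir_config = "tmp".decode("utf-8")                   # value as read from tahoe.cfg
    tempdir = abspath_expanduser_unicode(tempdir_config, base=basedir)
    # On POSIX this yields something like u"/home/alice/.tahoe/tmp"; on Windows the
    # result additionally carries the "\\?\" long-path prefix (see fileutil.py below).
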
@@ -2,7 +2,7 @@ import os.path, re, fnmatch
from twisted.python import usage
from allmydata.scripts.common import get_aliases, get_default_nodedir, \
DEFAULT_ALIAS, BaseOptions
from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_output
from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_local_unicode_path
NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")

@@ -140,15 +140,11 @@ class GetOptions(FilesystemOptions):
# tahoe get FOO bar # write to local file
# tahoe get tahoe:FOO bar # same
if arg2 == "-":
arg2 = None
self.from_file = argv_to_unicode(arg1)
if arg2:
self.to_file = argv_to_unicode(arg2)
else:
self.to_file = None
if self.to_file == "-":
self.to_file = None
self.to_file = None if arg2 is None else argv_to_abspath(arg2)
def getSynopsis(self):
return "Usage: %s [global-opts] get [options] REMOTE_FILE LOCAL_FILE" % (self.command_name,)

@@ -180,17 +176,11 @@ class PutOptions(FilesystemOptions):
def parseArgs(self, arg1=None, arg2=None):
# see Examples below
if arg1 is not None and arg2 is not None:
self.from_file = argv_to_unicode(arg1)
self.to_file = argv_to_unicode(arg2)
elif arg1 is not None and arg2 is None:
self.from_file = argv_to_unicode(arg1) # might be "-"
self.to_file = None
else:
self.from_file = None
self.to_file = None
if self.from_file == u"-":
self.from_file = None
if arg1 == "-":
arg1 = None
self.from_file = None if arg1 is None else argv_to_abspath(arg1)
self.to_file = None if arg2 is None else argv_to_unicode(arg2)
if self['format']:
if self['format'].upper() not in ("SDMF", "MDMF", "CHK"):

@@ -347,7 +337,7 @@ class BackupOptions(FilesystemOptions):
self['exclude'] = set()
def parseArgs(self, localdir, topath):
self.from_dir = argv_to_unicode(localdir)
self.from_dir = argv_to_abspath(localdir)
self.to_dir = argv_to_unicode(topath)
def getSynopsis(self):

@@ -368,7 +358,7 @@ class BackupOptions(FilesystemOptions):
try:
exclude_file = file(abs_filepath)
except:
raise BackupConfigurationError('Error opening exclude file %s.' % quote_output(abs_filepath))
raise BackupConfigurationError('Error opening exclude file %s.' % quote_local_unicode_path(abs_filepath))
try:
for line in exclude_file:
self.opt_exclude(line)

@@ -3,7 +3,8 @@ import os, sys, urllib
import codecs
from twisted.python import usage
from allmydata.util.assertutil import precondition
from allmydata.util.encodingutil import unicode_to_url, quote_output, argv_to_abspath
from allmydata.util.encodingutil import unicode_to_url, quote_output, \
quote_local_unicode_path, argv_to_abspath
from allmydata.util.fileutil import abspath_expanduser_unicode

@@ -40,7 +41,7 @@ class BasedirOptions(BaseOptions):
optParameters = [
["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]"
% get_default_nodedir()],
% quote_local_unicode_path(_default_nodedir)],
]
def parseArgs(self, basedir=None):

@@ -196,5 +197,6 @@ def get_alias(aliases, path_unicode, default):
return uri.from_string_dirnode(aliases[alias]).to_string(), path[colon+1:]
def escape_path(path):
# this always returns bytes, specifically US-ASCII, valid URL characters
segments = path.split("/")
return "/".join([urllib.quote(unicode_to_url(s)) for s in segments])
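
escape_path() above is shown only as context, but a worked example may help: each path segment is UTF-8 encoded by unicode_to_url() and then percent-escaped by urllib.quote(), so (assuming that behaviour) a non-ASCII name comes out as plain US-ASCII:

    >>> escape_path(u"docs/r\u00e9sum\u00e9.txt")
    'docs/r%C3%A9sum%C3%A9.txt'
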
@@ -645,15 +645,15 @@ def find_shares(options):
/home/warner/testnet/node-2/storage/shares/44k/44kai1tui348689nrw8fjegc8c/2
"""
from allmydata.storage.server import si_a2b, storage_index_to_dir
from allmydata.util.encodingutil import listdir_unicode
from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
out = options.stdout
sharedir = storage_index_to_dir(si_a2b(options.si_s))
for d in options.nodedirs:
d = os.path.join(d, "storage/shares", sharedir)
d = os.path.join(d, "storage", "shares", sharedir)
if os.path.exists(d):
for shnum in listdir_unicode(d):
print >>out, os.path.join(d, shnum)
print >>out, quote_local_unicode_path(os.path.join(d, shnum), quotemarks=False)
return 0

@@ -832,7 +832,7 @@ def catalog_shares(options):
err = options.stderr
now = time.time()
for d in options.nodedirs:
d = os.path.join(d, "storage/shares")
d = os.path.join(d, "storage", "shares")
try:
abbrevs = listdir_unicode(d)
except EnvironmentError:
@@ -6,7 +6,7 @@ from twisted.python import usage
from allmydata.scripts.common import get_default_nodedir
from allmydata.scripts import debug, create_node, startstop_node, cli, keygen, stats_gatherer, admin
from allmydata.util.encodingutil import quote_output, get_io_encoding
from allmydata.util.encodingutil import quote_output, quote_local_unicode_path, get_io_encoding
def GROUP(s):
# Usage.parseOptions compares argv[1] against command[0], so it will

@@ -25,7 +25,7 @@ NODEDIR_HELP = ("Specify which Tahoe node directory should be used. The "
"' which contains the mapping from alias name to root "
"dirnode URI.")
if _default_nodedir:
NODEDIR_HELP += " [default for most commands: " + quote_output(_default_nodedir) + "]"
NODEDIR_HELP += " [default for most commands: " + quote_local_unicode_path(_default_nodedir) + "]"
class Options(usage.Options):
# unit tests can override these to point at StringIO instances
@@ -4,7 +4,7 @@ from allmydata.scripts.common import BasedirOptions
from twisted.scripts import twistd
from twisted.python import usage
from allmydata.util import fileutil
from allmydata.util.encodingutil import listdir_unicode, quote_output
from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
class StartOptions(BasedirOptions):

@@ -92,13 +92,14 @@ def identify_node_type(basedir):
def start(config, out=sys.stdout, err=sys.stderr):
basedir = config['basedir']
print >>out, "STARTING", quote_output(basedir)
quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "STARTING", quoted_basedir
if not os.path.isdir(basedir):
print >>err, "%s does not look like a directory at all" % quote_output(basedir)
print >>err, "%s does not look like a directory at all" % quoted_basedir
return 1
nodetype = identify_node_type(basedir)
if not nodetype:
print >>err, "%s is not a recognizable node directory" % quote_output(basedir)
print >>err, "%s is not a recognizable node directory" % quoted_basedir
return 1
# Now prepare to turn into a twistd process. This os.chdir is the point
# of no return.

@@ -108,7 +109,7 @@ def start(config, out=sys.stdout, err=sys.stderr):
and "--nodaemon" not in config.twistd_args
and "--syslog" not in config.twistd_args
and "--logfile" not in config.twistd_args):
fileutil.make_dirs(os.path.join(basedir, "logs"))
fileutil.make_dirs(os.path.join(basedir, u"logs"))
twistd_args.extend(["--logfile", os.path.join("logs", "twistd.log")])
twistd_args.extend(config.twistd_args)
twistd_args.append("StartTahoeNode") # point at our StartTahoeNodePlugin

@@ -154,17 +155,18 @@ def start(config, out=sys.stdout, err=sys.stderr):
else:
verb = "starting"
print >>out, "%s node in %s" % (verb, basedir)
print >>out, "%s node in %s" % (verb, quoted_basedir)
twistd.runApp(twistd_config)
# we should only reach here if --nodaemon or equivalent was used
return 0
def stop(config, out=sys.stdout, err=sys.stderr):
basedir = config['basedir']
print >>out, "STOPPING", quote_output(basedir)
pidfile = os.path.join(basedir, "twistd.pid")
quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "STOPPING", quoted_basedir
pidfile = os.path.join(basedir, u"twistd.pid")
if not os.path.exists(pidfile):
print >>err, "%s does not look like a running node directory (no twistd.pid)" % quote_output(basedir)
print >>err, "%s does not look like a running node directory (no twistd.pid)" % quoted_basedir
# we define rc=2 to mean "nothing is running, but it wasn't me who
# stopped it"
return 2
@@ -10,9 +10,9 @@ from allmydata.scripts.common_http import do_http, HTTPError, format_http_error
from allmydata.util import time_format
from allmydata.scripts import backupdb
from allmydata.util.encodingutil import listdir_unicode, quote_output, \
to_str, FilenameEncodingError, unicode_to_url
quote_local_unicode_path, to_str, FilenameEncodingError, unicode_to_url
from allmydata.util.assertutil import precondition
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abspath
def get_local_metadata(path):

@@ -160,10 +160,11 @@ class BackerUpper:
print >>self.options.stderr, msg
def process(self, localpath):
precondition(isinstance(localpath, unicode), localpath)
precondition_abspath(localpath)
# returns newdircap
self.verboseprint("processing %s" % quote_output(localpath))
quoted_path = quote_local_unicode_path(localpath)
self.verboseprint("processing %s" % (quoted_path,))
create_contents = {} # childname -> (type, rocap, metadata)
compare_contents = {} # childname -> rocap

@@ -171,11 +172,11 @@ class BackerUpper:
children = listdir_unicode(localpath)
except EnvironmentError:
self.directories_skipped += 1
self.warn("WARNING: permission denied on directory %s" % quote_output(localpath))
self.warn("WARNING: permission denied on directory %s" % (quoted_path,))
children = []
except FilenameEncodingError:
self.directories_skipped += 1
self.warn("WARNING: could not list directory %s due to a filename encoding error" % quote_output(localpath))
self.warn("WARNING: could not list directory %s due to a filename encoding error" % (quoted_path,))
children = []
for child in self.options.filter_listdir(children):

@@ -197,17 +198,17 @@ class BackerUpper:
compare_contents[child] = childcap
except EnvironmentError:
self.files_skipped += 1
self.warn("WARNING: permission denied on file %s" % quote_output(childpath))
self.warn("WARNING: permission denied on file %s" % quote_local_unicode_path(childpath))
else:
self.files_skipped += 1
if os.path.islink(childpath):
self.warn("WARNING: cannot backup symlink %s" % quote_output(childpath))
self.warn("WARNING: cannot backup symlink %s" % quote_local_unicode_path(childpath))
else:
self.warn("WARNING: cannot backup special file %s" % quote_output(childpath))
self.warn("WARNING: cannot backup special file %s" % quote_local_unicode_path(childpath))
must_create, r = self.check_backupdb_directory(compare_contents)
if must_create:
self.verboseprint(" creating directory for %s" % quote_output(localpath))
self.verboseprint(" creating directory for %s" % quote_local_unicode_path(localpath))
newdircap = mkdir(create_contents, self.options)
assert isinstance(newdircap, str)
if r:

@@ -215,7 +216,7 @@ class BackerUpper:
self.directories_created += 1
return newdircap
else:
self.verboseprint(" re-using old directory for %s" % quote_output(localpath))
self.verboseprint(" re-using old directory for %s" % quote_local_unicode_path(localpath))
self.directories_reused += 1
return r.was_created()

@@ -288,16 +289,16 @@ class BackerUpper:
# This function will raise an IOError exception when called on an unreadable file
def upload(self, childpath):
precondition(isinstance(childpath, unicode), childpath)
precondition_abspath(childpath)
#self.verboseprint("uploading %s.." % quote_output(childpath))
#self.verboseprint("uploading %s.." % quote_local_unicode_path(childpath))
metadata = get_local_metadata(childpath)
# we can use the backupdb here
must_upload, bdb_results = self.check_backupdb_file(childpath)
if must_upload:
self.verboseprint("uploading %s.." % quote_output(childpath))
self.verboseprint("uploading %s.." % quote_local_unicode_path(childpath))
infileobj = open(childpath, "rb")
url = self.options['node-url'] + "uri"
resp = do_http("PUT", url, infileobj)

@@ -305,7 +306,7 @@ class BackerUpper:
raise HTTPError("Error during file PUT", resp)
filecap = resp.read().strip()
self.verboseprint(" %s -> %s" % (quote_output(childpath, quotemarks=False),
self.verboseprint(" %s -> %s" % (quote_local_unicode_path(childpath, quotemarks=False),
quote_output(filecap, quotemarks=False)))
#self.verboseprint(" metadata: %s" % (quote_output(metadata, quotemarks=False),))

@@ -316,7 +317,7 @@ class BackerUpper:
return filecap, metadata
else:
self.verboseprint("skipping %s.." % quote_output(childpath))
self.verboseprint("skipping %s.." % quote_local_unicode_path(childpath))
self.files_reused += 1
return bdb_results.was_uploaded(), metadata
@@ -9,14 +9,15 @@ from allmydata.scripts.common import get_alias, escape_path, \
from allmydata.scripts.common_http import do_http, HTTPError
from allmydata import uri
from allmydata.util import fileutil
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, to_str
from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abspath
from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, \
quote_local_unicode_path, to_str
from allmydata.util.assertutil import precondition
class MissingSourceError(TahoeError):
def __init__(self, name):
TahoeError.__init__(self, "No such file or directory %s" % quote_output(name))
def __init__(self, name, quotefn=quote_output):
TahoeError.__init__(self, "No such file or directory %s" % quotefn(name))
def GET_to_file(url):

@@ -61,37 +62,34 @@ def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
class LocalFileSource:
def __init__(self, pathname):
precondition(isinstance(pathname, unicode), pathname)
precondition_abspath(pathname)
self.pathname = pathname
def need_to_copy_bytes(self):
return True
def open(self, caps_only):
return open(os.path.expanduser(self.pathname), "rb")
return open(self.pathname, "rb")
class LocalFileTarget:
def __init__(self, pathname):
precondition(isinstance(pathname, unicode), pathname)
precondition_abspath(pathname)
self.pathname = pathname
def put_file(self, inf):
fileutil.put_file(self.pathname, inf)
class LocalMissingTarget:
def __init__(self, pathname):
precondition(isinstance(pathname, unicode), pathname)
precondition_abspath(pathname)
self.pathname = pathname
def put_file(self, inf):
fileutil.put_file(self.pathname, inf)
class LocalDirectorySource:
def __init__(self, progressfunc, pathname):
precondition(isinstance(pathname, unicode), pathname)
precondition_abspath(pathname)
self.progressfunc = progressfunc
self.pathname = pathname

@@ -119,7 +117,7 @@ class LocalDirectorySource:
class LocalDirectoryTarget:
def __init__(self, progressfunc, pathname):
precondition(isinstance(pathname, unicode), pathname)
precondition_abspath(pathname)
self.progressfunc = progressfunc
self.pathname = pathname

@@ -132,7 +130,6 @@ class LocalDirectoryTarget:
children = listdir_unicode(self.pathname)
for i,n in enumerate(children):
self.progressfunc("examining %d of %d" % (i+1, len(children)))
n = unicode(n)
pn = os.path.join(self.pathname, n)
if os.path.isdir(pn):
child = LocalDirectoryTarget(self.progressfunc, pn)

@@ -144,6 +141,7 @@ class LocalDirectoryTarget:
self.children[n] = LocalFileTarget(pn)
def get_child_target(self, name):
precondition(isinstance(name, unicode), name)
if self.children is None:
self.populate(False)
if name in self.children:

@@ -160,6 +158,7 @@ class LocalDirectoryTarget:
def set_children(self):
pass
class TahoeFileSource:
def __init__(self, nodeurl, mutable, writecap, readcap):
self.nodeurl = nodeurl

@@ -371,6 +370,7 @@ class TahoeDirectoryTarget:
def get_child_target(self, name):
# return a new target for a named subdirectory of this dir
precondition(isinstance(name, unicode), name)
if self.children is None:
self.populate(False)
if name in self.children:

@@ -383,6 +383,7 @@ class TahoeDirectoryTarget:
return child
def put_file(self, name, inf):
precondition(isinstance(name, unicode), name)
url = self.nodeurl + "uri"
if not hasattr(inf, "seek"):
inf = inf.read()

@@ -402,6 +403,7 @@ class TahoeDirectoryTarget:
self.new_children[name] = filecap
def put_uri(self, name, filecap):
precondition(isinstance(name, unicode), name)
self.new_children[name] = filecap
def set_children(self):

@@ -518,16 +520,23 @@ class Copier:
def to_stderr(self, text):
print >>self.stderr, text
# FIXME reduce the amount of near-duplicate code between get_target_info
# and get_source_info.
def get_target_info(self, destination_spec):
rootcap, path = get_alias(self.aliases, destination_spec, None)
precondition(isinstance(destination_spec, unicode), destination_spec)
rootcap, path_utf8 = get_alias(self.aliases, destination_spec, None)
path = path_utf8.decode("utf-8")
if rootcap == DefaultAliasMarker:
# no alias, so this is a local file
pathname = abspath_expanduser_unicode(path.decode('utf-8'))
pathname = abspath_expanduser_unicode(path)
if not os.path.exists(pathname):
t = LocalMissingTarget(pathname)
elif os.path.isdir(pathname):
t = LocalDirectoryTarget(self.progress, pathname)
else:
# TODO: should this be _assert? what happens if the target is
# a special file?
assert os.path.isfile(pathname), pathname
t = LocalFileTarget(pathname) # non-empty
else:

@@ -559,13 +568,15 @@ class Copier:
return t
def get_source_info(self, source_spec):
rootcap, path = get_alias(self.aliases, source_spec, None)
precondition(isinstance(source_spec, unicode), source_spec)
rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
path = path_utf8.decode("utf-8")
if rootcap == DefaultAliasMarker:
# no alias, so this is a local file
pathname = abspath_expanduser_unicode(path.decode('utf-8'))
pathname = abspath_expanduser_unicode(path)
name = os.path.basename(pathname)
if not os.path.exists(pathname):
raise MissingSourceError(source_spec)
raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
if os.path.isdir(pathname):
t = LocalDirectorySource(self.progress, pathname)
else:

@@ -577,9 +588,9 @@ class Copier:
name = None
if path:
url += "/" + escape_path(path)
last_slash = path.rfind("/")
last_slash = path.rfind(u"/")
name = path
if last_slash:
if last_slash != -1:
name = path[last_slash+1:]
resp = do_http("GET", url + "?t=json")

@@ -598,8 +609,13 @@ class Copier:
writecap = to_str(d.get("rw_uri"))
readcap = to_str(d.get("ro_uri"))
mutable = d.get("mutable", False) # older nodes don't provide it
if source_spec.rfind('/') != -1:
name = source_spec[source_spec.rfind('/')+1:]
last_slash = source_spec.rfind(u"/")
if last_slash != -1:
# TODO: this looks funny and redundant with the 'name'
# assignment above. cf #2329
name = source_spec[last_slash+1:]
t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap)
return name, t
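
The switch from "if last_slash:" to "if last_slash != -1:" in the hunks above matters because str.rfind() returns -1 when the separator is absent (and -1 is truthy), but returns 0 when the only separator is the leading character (and 0 is falsy). A tiny worked example in plain Python:

    >>> u"readme.txt".rfind(u"/")   # no slash: -1 is truthy, so the old test took the branch
    -1
    >>> u"/readme.txt".rfind(u"/")  # slash at index 0: 0 is falsy, so the old test skipped the slice
    0
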
@@ -679,6 +695,7 @@ class Copier:
return self.announce_success("files copied")
def attach_to_target(self, source, name, target):
precondition(isinstance(name, unicode), name)
if target not in self.targetmap:
self.targetmap[target] = {}
self.targetmap[target][name] = source

@@ -686,7 +703,7 @@ class Copier:
def assign_targets(self, source, target):
# copy everything in the source into the target
assert isinstance(source, (LocalDirectorySource, TahoeDirectorySource))
precondition(isinstance(source, (LocalDirectorySource, TahoeDirectorySource)), source)
for name, child in source.children.items():
if isinstance(child, (LocalDirectorySource, TahoeDirectorySource)):

@@ -694,14 +711,12 @@ class Copier:
subtarget = target.get_child_target(name)
self.assign_targets(child, subtarget)
else:
assert isinstance(child, (LocalFileSource, TahoeFileSource))
precondition(isinstance(child, (LocalFileSource, TahoeFileSource)), child)
self.attach_to_target(child, name, target)
def copy_files_to_target(self, targetmap, target):
for name, source in targetmap.items():
assert isinstance(source, (LocalFileSource, TahoeFileSource))
precondition(isinstance(source, (LocalFileSource, TahoeFileSource)), source)
self.copy_file_into(source, name, target)
self.files_copied += 1
self.progress("%d/%d files, %d/%d directories" %

@@ -723,9 +738,9 @@ class Copier:
return 0
def copy_file(self, source, target):
assert isinstance(source, (LocalFileSource, TahoeFileSource))
assert isinstance(target, (LocalFileTarget, TahoeFileTarget,
LocalMissingTarget, TahoeMissingTarget))
precondition(isinstance(source, (LocalFileSource, TahoeFileSource)), source)
precondition(isinstance(target, (LocalFileTarget, TahoeFileTarget,
LocalMissingTarget, TahoeMissingTarget)), target)
if self.need_to_copy_bytes(source, target):
# if the target is a local directory, this will just write the
# bytes to disk. If it is a tahoe directory, it will upload the

@@ -740,8 +755,9 @@ class Copier:
return self.announce_success("file linked")
def copy_file_into(self, source, name, target):
assert isinstance(source, (LocalFileSource, TahoeFileSource))
assert isinstance(target, (LocalDirectoryTarget, TahoeDirectoryTarget))
precondition(isinstance(source, (LocalFileSource, TahoeFileSource)), source)
precondition(isinstance(target, (LocalDirectoryTarget, TahoeDirectoryTarget)), target)
precondition(isinstance(name, unicode), name)
if self.need_to_copy_bytes(source, target):
# if the target is a local directory, this will just write the
# bytes to disk. If it is a tahoe directory, it will upload the
@@ -1,5 +1,5 @@
import os, urllib
import urllib
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
UnknownAliasError
from allmydata.scripts.common_http import do_http, format_http_error

@@ -26,7 +26,7 @@ def get(options):
resp = do_http("GET", url)
if resp.status in (200, 201,):
if to_file:
outf = open(os.path.expanduser(to_file), "wb")
outf = open(to_file, "wb")
else:
outf = stdout
while True:
@@ -1,7 +1,7 @@
import os
from cStringIO import StringIO
import urllib
from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
UnknownAliasError

@@ -73,7 +73,7 @@ def put(options):
url += "?" + "&".join(queryargs)
if from_file:
infileobj = open(os.path.expanduser(from_file), "rb")
infileobj = open(from_file, "rb")
else:
# do_http() can't use stdin directly: for one thing, we need a
# Content-Length field. So we currently must copy it.
@@ -11,8 +11,8 @@ from twisted.application.internet import TimerService
from zope.interface import implements
from foolscap.api import eventually, DeadReferenceError, Referenceable, Tub
from allmydata.util import log
from allmydata.util.encodingutil import quote_output
from allmydata.util import log, fileutil
from allmydata.util.encodingutil import quote_local_unicode_path
from allmydata.interfaces import RIStatsProvider, RIStatsGatherer, IStatsProducer
class LoadMonitor(service.MultiService):

@@ -246,7 +246,7 @@ class StdOutStatsGatherer(StatsGatherer):
class PickleStatsGatherer(StdOutStatsGatherer):
# inherit from StdOutStatsGatherer for connect/disconnect notifications
def __init__(self, basedir=".", verbose=True):
def __init__(self, basedir=u".", verbose=True):
self.verbose = verbose
StatsGatherer.__init__(self, basedir)
self.picklefile = os.path.join(basedir, "stats.pickle")

@@ -258,7 +258,7 @@ class PickleStatsGatherer(StdOutStatsGatherer):
except Exception:
print ("Error while attempting to load pickle file %s.\n"
"You may need to restore this file from a backup, or delete it if no backup is available.\n" %
quote_output(os.path.abspath(self.picklefile)))
quote_local_unicode_path(self.picklefile))
raise
f.close()
else:

@@ -311,7 +311,7 @@ class StatsGathererService(service.MultiService):
def save_portnum(self, junk):
portnum = self.listener.getPortnum()
portnumfile = os.path.join(self.basedir, 'portnum')
open(portnumfile, 'wb').write('%d\n' % (portnum,))
fileutil.write(portnumfile, '%d\n' % (portnum,))
def tub_ready(self, ignored):
ff = os.path.join(self.basedir, self.furl_file)
@@ -5,6 +5,8 @@ from twisted.conch import error as conch_error
from twisted.conch.ssh import keys
from allmydata.frontends import auth
from allmydata.util.fileutil import abspath_expanduser_unicode
DUMMY_KEY = keys.Key.fromString("""\
-----BEGIN RSA PRIVATE KEY-----

@@ -37,7 +39,8 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
def setUp(self):
self.account_file = filepath.FilePath(self.mktemp())
self.account_file.setContent(DUMMY_ACCOUNTS)
self.checker = auth.AccountFileChecker(None, self.account_file.path)
abspath = abspath_expanduser_unicode(unicode(self.account_file.path))
self.checker = auth.AccountFileChecker(None, abspath)
def test_unknown_user(self):
"""
@@ -2509,6 +2509,28 @@ starting copy, 2 files, 1 directories
d.addCallback(_check_local_fs)
return d
def test_ticket_2027(self):
# This test ensures that tahoe will copy a file from the grid to
# a local directory without a specified file name.
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2027
self.basedir = "cli/Cp/cp_verbose"
self.set_up_grid()
# Write a test file, which we'll copy to the grid.
test1_path = os.path.join(self.basedir, "test1")
fileutil.write(test1_path, "test1")
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda ign:
self.do_cli("cp", test1_path, "tahoe:"))
d.addCallback(lambda ign:
self.do_cli("cp", "tahoe:test1", self.basedir))
def _check(res):
(rc, out, err) = res
self.failUnlessIn("Success: file copied", out, str(res))
return d
class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
def writeto(self, path, data):

@@ -2854,7 +2876,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 2)
foo2 = os.path.join(source, "foo2.txt")
self.failUnlessReallyEqual(err, "WARNING: cannot backup symlink '%s'\n" % foo2)
self.failUnlessIn("WARNING: cannot backup symlink ", err)
self.failUnlessIn(foo2, err)
fu, fr, fs, dc, dr, ds = self.count_output(out)
# foo.txt

@@ -3779,7 +3802,7 @@ class Webopen(GridTestMixin, CLITestMixin, unittest.TestCase):
raise
return d
class Options(unittest.TestCase):
class Options(ReallyEqualMixin, unittest.TestCase):
# this test case only looks at argument-processing and simple stuff.
def parse(self, args, stdout=None):

@@ -3861,17 +3884,17 @@ class Options(unittest.TestCase):
# option after, or a basedir argument after, but none in the wrong
# place, and not more than one of the three.
o = self.parse(["start"])
self.failUnlessEqual(o["basedir"], os.path.join(os.path.expanduser("~"),
".tahoe"))
self.failUnlessReallyEqual(o["basedir"], os.path.join(fileutil.abspath_expanduser_unicode(u"~"),
u".tahoe"))
o = self.parse(["start", "here"])
self.failUnlessEqual(o["basedir"], os.path.abspath("here"))
self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"here"))
o = self.parse(["start", "--basedir", "there"])
self.failUnlessEqual(o["basedir"], os.path.abspath("there"))
self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"there"))
o = self.parse(["--node-directory", "there", "start"])
self.failUnlessEqual(o["basedir"], os.path.abspath("there"))
self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"there"))
o = self.parse(["start", "here", "--nodaemon"])
self.failUnlessEqual(o["basedir"], os.path.abspath("here"))
self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"here"))
self.failUnlessRaises(usage.UsageError, self.parse,
["--basedir", "there", "start"])
@@ -1,4 +1,4 @@
import os
import os, sys
from twisted.trial import unittest
from twisted.application import service

@@ -6,6 +6,7 @@ import allmydata
from allmydata.node import Node, OldConfigError, OldConfigOptionError, MissingConfigEntry, UnescapedHashError
from allmydata import client
from allmydata.storage_client import StorageFarmBroker
from allmydata.manhole import AuthorizedKeysManhole
from allmydata.util import base32, fileutil
from allmydata.interfaces import IFilesystemNode, IFileNode, \
IImmutableFileNode, IMutableFileNode, IDirectoryNode

@@ -68,10 +69,11 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
fileutil.write(os.path.join(basedir, "debug_discard_storage"), "")
e = self.failUnlessRaises(OldConfigError, client.Client, basedir)
self.failUnlessIn(os.path.abspath(os.path.join(basedir, "introducer.furl")), e.args[0])
self.failUnlessIn(os.path.abspath(os.path.join(basedir, "no_storage")), e.args[0])
self.failUnlessIn(os.path.abspath(os.path.join(basedir, "readonly_storage")), e.args[0])
self.failUnlessIn(os.path.abspath(os.path.join(basedir, "debug_discard_storage")), e.args[0])
abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir)).encode(sys.getfilesystemencoding())
self.failUnlessIn(os.path.join(abs_basedir, "introducer.furl"), e.args[0])
self.failUnlessIn(os.path.join(abs_basedir, "no_storage"), e.args[0])
self.failUnlessIn(os.path.join(abs_basedir, "readonly_storage"), e.args[0])
self.failUnlessIn(os.path.join(abs_basedir, "debug_discard_storage"), e.args[0])
for oldfile in ['introducer.furl', 'no_storage', 'readonly_storage',
'debug_discard_storage']:

@@ -173,6 +175,34 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
"reserved_space = bogus\n")
self.failUnlessRaises(ValueError, client.Client, basedir)
def test_web_staticdir(self):
basedir = u"client.Basic.test_web_staticdir"
os.mkdir(basedir)
fileutil.write(os.path.join(basedir, "tahoe.cfg"),
BASECONFIG +
"[node]\n" +
"web.port = tcp:0:interface=127.0.0.1\n" +
"web.static = relative\n")
c = client.Client(basedir)
w = c.getServiceNamed("webish")
abs_basedir = fileutil.abspath_expanduser_unicode(basedir)
expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
self.failUnlessReallyEqual(w.staticdir, expected)
def test_manhole_keyfile(self):
basedir = u"client.Basic.test_manhole_keyfile"
os.mkdir(basedir)
fileutil.write(os.path.join(basedir, "tahoe.cfg"),
BASECONFIG +
"[node]\n" +
"ssh.port = tcp:0:interface=127.0.0.1\n" +
"ssh.authorized_keys_file = relative\n")
c = client.Client(basedir)
m = [s for s in c if isinstance(s, AuthorizedKeysManhole)][0]
abs_basedir = fileutil.abspath_expanduser_unicode(basedir)
expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
self.failUnlessReallyEqual(m.keyfile, expected)
def _permute(self, sb, key):
return [ s.get_longname() for s in sb.get_servers_for_psi(key) ]
@@ -61,10 +61,11 @@ from mock import patch
import os, sys, locale
from allmydata.test.common_util import ReallyEqualMixin
from allmydata.util import encodingutil
from allmydata.util import encodingutil, fileutil
from allmydata.util.encodingutil import argv_to_unicode, unicode_to_url, \
unicode_to_output, quote_output, unicode_platform, listdir_unicode, \
FilenameEncodingError, get_io_encoding, get_filesystem_encoding, _reload
unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
unicode_platform, listdir_unicode, FilenameEncodingError, get_io_encoding, \
get_filesystem_encoding, _reload
from allmydata.dirnode import normalize
from twisted.python import usage

@@ -274,8 +275,8 @@ class StdlibUnicode(unittest.TestCase):
# to lumiere_nfc (on Mac OS X, it will be the NFD equivalent).
self.failUnlessIn(lumiere_nfc + ".txt", set([normalize(fname) for fname in filenames]))
expanded = os.path.expanduser("~/" + lumiere_nfc)
self.failIfIn("~", expanded)
expanded = fileutil.expanduser(u"~/" + lumiere_nfc)
self.failIfIn(u"~", expanded)
self.failUnless(expanded.endswith(lumiere_nfc), expanded)
def test_open_unrepresentable(self):

@@ -395,6 +396,19 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
self.test_quote_output_utf8(None)
class QuotePaths(ReallyEqualMixin, unittest.TestCase):
def test_quote_path(self):
self.failUnlessReallyEqual(quote_path([u'foo', u'bar']), "'foo/bar'")
self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=True), "'foo/bar'")
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar']), '"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), '"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo"),
"'C:\\foo'" if sys.platform == "win32" else "'\\\\?\\C:\\foo'")
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar"),
"'\\\\foo\\bar'" if sys.platform == "win32" else "'\\\\?\\UNC\\foo\\bar'")
class UbuntuKarmicUTF8(EncodingUtil, unittest.TestCase):
uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
argv = 'lumi\xc3\xa8re'
@@ -28,7 +28,7 @@ class LoggingMultiService(service.MultiService):
def log(self, msg, **kw):
log.msg(msg, **kw)
class Node(testutil.SignalMixin, unittest.TestCase):
class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, unittest.TestCase):
def test_furl(self):
basedir = "introducer.IntroducerNode.test_furl"
os.mkdir(basedir)

@@ -74,6 +74,20 @@ class Node(testutil.SignalMixin, unittest.TestCase):
d.addCallback(_check_furl)
return d
def test_web_static(self):
basedir = u"introducer.Node.test_web_static"
os.mkdir(basedir)
fileutil.write(os.path.join(basedir, "tahoe.cfg"),
"[node]\n" +
"web.port = tcp:0:interface=127.0.0.1\n" +
"web.static = relative\n")
c = IntroducerNode(basedir)
w = c.getServiceNamed("webish")
abs_basedir = fileutil.abspath_expanduser_unicode(basedir)
expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
self.failUnlessReallyEqual(w.staticdir, expected)
class ServiceMixin:
def setUp(self):
self.parent = LoggingMultiService()
@@ -1,10 +1,13 @@
import os, re, base64
from cStringIO import StringIO
from twisted.trial import unittest
from twisted.internet import defer, reactor
from allmydata import uri, client
from allmydata.nodemaker import NodeMaker
from allmydata.util import base32, consumer, fileutil, mathutil
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.hashutil import tagged_hash, ssk_writekey_hash, \
ssk_pubkey_fingerprint_hash
from allmydata.util.consumer import MemoryConsumer

@@ -3110,7 +3113,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
fso = debug.FindSharesOptions()
storage_index = base32.b2a(n.get_storage_index())
fso.si_s = storage_index
fso.nodedirs = [unicode(os.path.dirname(os.path.abspath(storedir)))
fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(unicode(storedir)))
for (i,ss,storedir)
in self.iterate_servers()]
fso.stdout = StringIO()
@@ -15,6 +15,8 @@ from allmydata.util import limiter, time_format, pollmixin, cachedir
from allmydata.util import statistics, dictutil, pipeline
from allmydata.util import log as tahoe_log
from allmydata.util.spans import Spans, overlap, DataSpans
from allmydata.test.common_util import ReallyEqualMixin
class Base32(unittest.TestCase):
def test_b2a_matches_Pythons(self):

@@ -370,7 +372,7 @@ class Asserts(unittest.TestCase):
m = self.should_assert(f, False, othermsg="message2")
self.failUnlessEqual("postcondition: othermsg: 'message2' <type 'str'>", m)
class FileUtil(unittest.TestCase):
class FileUtil(ReallyEqualMixin, unittest.TestCase):
def mkdir(self, basedir, path, mode=0777):
fn = os.path.join(basedir, path)
fileutil.make_dirs(fn, mode)

@@ -429,22 +431,6 @@ class FileUtil(unittest.TestCase):
fileutil.write_atomically(fn, "two", mode="") # non-binary
self.failUnlessEqual(fileutil.read(fn), "two")
def test_open_or_create(self):
basedir = "util/FileUtil/test_open_or_create"
fileutil.make_dirs(basedir)
fn = os.path.join(basedir, "here")
f = fileutil.open_or_create(fn)
f.write("stuff.")
f.close()
f = fileutil.open_or_create(fn)
f.seek(0, os.SEEK_END)
f.write("more.")
f.close()
f = open(fn, "r")
data = f.read()
f.close()
self.failUnlessEqual(data, "stuff.more.")
def test_NamedTemporaryDirectory(self):
basedir = "util/FileUtil/test_NamedTemporaryDirectory"
fileutil.make_dirs(basedir)

@@ -488,7 +474,16 @@ class FileUtil(unittest.TestCase):
abspath_cwd = fileutil.abspath_expanduser_unicode(u".")
self.failUnless(isinstance(saved_cwd, unicode), saved_cwd)
self.failUnless(isinstance(abspath_cwd, unicode), abspath_cwd)
self.failUnlessEqual(abspath_cwd, saved_cwd)
if sys.platform == "win32":
self.failUnlessReallyEqual(abspath_cwd, fileutil.to_windows_long_path(saved_cwd))
else:
self.failUnlessReallyEqual(abspath_cwd, saved_cwd)
self.failUnlessReallyEqual(fileutil.to_windows_long_path(u"\\\\?\\foo"), u"\\\\?\\foo")
self.failUnlessReallyEqual(fileutil.to_windows_long_path(u"\\\\.\\foo"), u"\\\\.\\foo")
self.failUnlessReallyEqual(fileutil.to_windows_long_path(u"\\\\server\\foo"), u"\\\\?\\UNC\\server\\foo")
self.failUnlessReallyEqual(fileutil.to_windows_long_path(u"C:\\foo"), u"\\\\?\\C:\\foo")
self.failUnlessReallyEqual(fileutil.to_windows_long_path(u"C:\\foo/bar"), u"\\\\?\\C:\\foo\\bar")
# adapted from <http://svn.python.org/view/python/branches/release26-maint/Lib/test/test_posixpath.py?view=markup&pathrev=78279#test_abspath>

@@ -512,6 +507,34 @@ class FileUtil(unittest.TestCase):
finally:
os.chdir(saved_cwd)
def test_create_long_path(self):
workdir = u"test_create_long_path"
fileutil.make_dirs(workdir)
long_path = fileutil.abspath_expanduser_unicode(os.path.join(workdir, u'x'*255))
def _cleanup():
fileutil.remove(long_path)
self.addCleanup(_cleanup)
fileutil.write(long_path, "test")
self.failUnless(os.path.exists(long_path))
self.failUnlessEqual(fileutil.read(long_path), "test")
_cleanup()
self.failIf(os.path.exists(long_path))
def test_windows_expanduser(self):
def call_windows_getenv(name):
if name == u"HOMEDRIVE": return u"C:"
if name == u"HOMEPATH": return u"\\Documents and Settings\\\u0100"
self.fail("unexpected argument to call_windows_getenv")
self.patch(fileutil, 'windows_getenv', call_windows_getenv)
self.failUnlessReallyEqual(fileutil.windows_expanduser(u"~"), os.path.join(u"C:", u"\\Documents and Settings\\\u0100"))
self.failUnlessReallyEqual(fileutil.windows_expanduser(u"~\\foo"), os.path.join(u"C:", u"\\Documents and Settings\\\u0100", u"foo"))
self.failUnlessReallyEqual(fileutil.windows_expanduser(u"~/foo"), os.path.join(u"C:", u"\\Documents and Settings\\\u0100", u"foo"))
self.failUnlessReallyEqual(fileutil.windows_expanduser(u"a"), u"a")
self.failUnlessReallyEqual(fileutil.windows_expanduser(u"a~"), u"a~")
self.failUnlessReallyEqual(fileutil.windows_expanduser(u"a\\~\\foo"), u"a\\~\\foo")
def test_disk_stats(self):
avail = fileutil.get_available_space('.', 2**14)
if avail == 0:
@@ -230,6 +230,16 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
def quote_path(path, quotemarks=True):
return quote_output("/".join(map(to_str, path)), quotemarks=quotemarks, quote_newlines=True)
def quote_local_unicode_path(path, quotemarks=True):
precondition(isinstance(path, unicode), path)
if sys.platform == "win32" and path.startswith(u"\\\\?\\"):
path = path[4 :]
if path.startswith(u"UNC\\"):
path = u"\\\\" + path[4 :]
return quote_output(path, quotemarks=quotemarks, quote_newlines=True)
def unicode_platform():
"""
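
The new quote_local_unicode_path() exists so that user-facing messages do not expose the Windows "\\?\" long-path prefix that abspath_expanduser_unicode now produces. A rough illustration of the intended use, with an invented path, mirroring the CLI changes earlier in this diff:

    # Hypothetical message, as printed on Windows:
    path = u"\\\\?\\C:\\tahoe\\logs"     # long-path form from abspath_expanduser_unicode
    print "%s does not look like a directory at all" % quote_local_unicode_path(path)
    # prints: 'C:\tahoe\logs' does not look like a directory at all
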
@@ -224,12 +224,6 @@ def remove_if_possible(f):
except:
pass
def open_or_create(fname, binarymode=True):
try:
return open(fname, binarymode and "r+b" or "r+")
except EnvironmentError:
return open(fname, binarymode and "w+b" or "w+")
def du(basedir):
size = 0

@@ -269,9 +263,11 @@ def read(path):
finally:
rf.close()
def put_file(pathname, inf):
def put_file(path, inf):
precondition_abspath(path)
# TODO: create temporary file and move into place?
outf = open(os.path.expanduser(pathname), "wb")
outf = open(path, "wb")
try:
while True:
data = inf.read(32768)

@@ -282,6 +278,20 @@ def put_file(pathname, inf):
outf.close()
def precondition_abspath(path):
if not isinstance(path, unicode):
raise AssertionError("an abspath must be a Unicode string")
if sys.platform == "win32":
# This intentionally doesn't view absolute paths starting with a drive specification, or
# paths relative to the current drive, as acceptable.
if not path.startswith("\\\\"):
raise AssertionError("an abspath should be normalized using abspath_expanduser_unicode")
else:
# This intentionally doesn't view the path '~' or paths starting with '~/' as acceptable.
if not os.path.isabs(path):
raise AssertionError("an abspath should be normalized using abspath_expanduser_unicode")
# Work around <http://bugs.python.org/issue3426>. This code is adapted from
# <http://svn.python.org/view/python/trunk/Lib/ntpath.py?revision=78247&view=markup>
# with some simplifications.

@@ -292,11 +302,20 @@ try:
except ImportError:
pass
def abspath_expanduser_unicode(path):
"""Return the absolute version of a path."""
assert isinstance(path, unicode), path
def abspath_expanduser_unicode(path, base=None):
"""
Return the absolute version of a path. If 'base' is given and 'path' is relative,
the path will be expanded relative to 'base'.
'path' must be a Unicode string. 'base', if given, must be a Unicode string
corresponding to an absolute path as returned by a previous call to
abspath_expanduser_unicode.
"""
if not isinstance(path, unicode):
raise AssertionError("paths must be Unicode strings")
if base is not None:
precondition_abspath(base)
path = os.path.expanduser(path)
path = expanduser(path)
if _getfullpathname:
# On Windows, os.path.isabs will return True for paths without a drive letter,

@@ -307,19 +326,50 @@ def abspath_expanduser_unicode(path):
pass
if not os.path.isabs(path):
path = os.path.join(os.getcwdu(), path)
if base is None:
path = os.path.join(os.getcwdu(), path)
else:
path = os.path.join(base, path)
# We won't hit <http://bugs.python.org/issue5827> because
# there is always at least one Unicode path component.
return os.path.normpath(path)
path = os.path.normpath(path)
if sys.platform == "win32":
path = to_windows_long_path(path)
return path
def to_windows_long_path(path):
# '/' is normally a perfectly valid path component separator in Windows.
# However, when using the "\\?\" syntax it is not recognized, so we
# replace it with '\' here.
path = path.replace(u"/", u"\\")
# Note that other normalizations such as removing '.' and '..' should
# be done outside this function.
if path.startswith(u"\\\\?\\") or path.startswith(u"\\\\.\\"):
return path
elif path.startswith(u"\\\\"):
return u"\\\\?\\UNC\\" + path[2 :]
else:
return u"\\\\?\\" + path
have_GetDiskFreeSpaceExW = False
if sys.platform == "win32":
try:
from ctypes import WINFUNCTYPE, windll, POINTER, byref, c_ulonglong
from ctypes.wintypes import BOOL, DWORD, LPCWSTR
from ctypes import WINFUNCTYPE, windll, POINTER, byref, c_ulonglong, create_unicode_buffer
from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPWSTR
# <http://msdn.microsoft.com/en-us/library/ms679360%28v=VS.85%29.aspx>
GetLastError = WINFUNCTYPE(DWORD)(("GetLastError", windll.kernel32))
# <http://msdn.microsoft.com/en-us/library/windows/desktop/ms683188%28v=vs.85%29.aspx>
GetEnvironmentVariableW = WINFUNCTYPE(DWORD, LPCWSTR, LPWSTR, DWORD)(
("GetEnvironmentVariableW", windll.kernel32))
try:
# <http://msdn.microsoft.com/en-us/library/aa383742%28v=VS.85%29.aspx>
PULARGE_INTEGER = POINTER(c_ulonglong)

@@ -327,14 +377,57 @@ if sys.platform == "win32":
GetDiskFreeSpaceExW = WINFUNCTYPE(BOOL, LPCWSTR, PULARGE_INTEGER, PULARGE_INTEGER, PULARGE_INTEGER)(
("GetDiskFreeSpaceExW", windll.kernel32))
# <http://msdn.microsoft.com/en-us/library/ms679360%28v=VS.85%29.aspx>
GetLastError = WINFUNCTYPE(DWORD)(("GetLastError", windll.kernel32))
have_GetDiskFreeSpaceExW = True
except Exception:
import traceback
traceback.print_exc()
def expanduser(path):
# os.path.expanduser is hopelessly broken for Unicode paths on Windows (ticket #1674).
if sys.platform == "win32":
return windows_expanduser(path)
else:
return os.path.expanduser(path)
def windows_expanduser(path):
if not path.startswith('~'):
return path
home_drive = windows_getenv(u'HOMEDRIVE')
home_path = windows_getenv(u'HOMEPATH')
if path == '~':
return os.path.join(home_drive, home_path)
elif path.startswith('~/') or path.startswith('~\\'):
return os.path.join(home_drive, home_path, path[2 :])
else:
return path
def windows_getenv(name):
# Based on <http://stackoverflow.com/questions/2608200/problems-with-umlauts-in-python-appdata-environvent-variable/2608368#2608368>,
# with improved error handling.
if not isinstance(name, unicode):
raise AssertionError("name must be Unicode")
n = GetEnvironmentVariableW(name, None, 0)
if n == 0:
err = GetLastError()
raise OSError("Windows error %d attempting to read size of environment variable %r"
% (err, name))
elif n < 0:
raise OSError("Unexpected result %d from GetEnvironmentVariableW attempting to read size of environment variable %r"
% (n, name))
buf = create_unicode_buffer(u'\0'*n)
retval = GetEnvironmentVariableW(name, buf, n)
if retval == 0:
err = GetLastError()
raise OSError("Windows error %d attempting to read environment variable %r"
% (err, name))
elif retval != n-1:
raise OSError("Unexpected result %d from GetEnvironmentVariableW attempting to read environment variable %r"
% (n, name))
return buf.value
def get_disk_stats(whichdir, reserved_space=0):
"""Return disk statistics for the storage disk, in the form of a dict
with the following fields.
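
Tying the new fileutil helpers together: a rough sketch of how a relative configuration value becomes a normalized absolute path, and what that path looks like on POSIX versus Windows (the directory names are invented for the example):

    from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abspath

    base = abspath_expanduser_unicode(u"~/.tahoe")       # e.g. u"/home/alice/.tahoe" on POSIX
    p = abspath_expanduser_unicode(u"logs", base=base)   # -> u"/home/alice/.tahoe/logs"
    precondition_abspath(p)                              # passes: p is absolute and unicode

    # On Windows the same call instead returns a long-path form such as
    # u"\\\\?\\C:\\Users\\alice\\.tahoe\\logs", because abspath_expanduser_unicode
    # now finishes by calling to_windows_long_path(); the "\\?\" prefix is what
    # lifts the ~260-character MAX_PATH limit for most Win32 file APIs.
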
@@ -147,6 +147,7 @@ class WebishServer(service.MultiService):
self.site = site = appserver.NevowSite(self.root)
self.site.requestFactory = MyRequest
self.site.remember(MyExceptionHandler(), inevow.ICanHandleException)
self.staticdir = staticdir # so tests can check
if staticdir:
self.root.putChild("static", static.File(staticdir))
if re.search(r'^\d', webport):