Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Quote local paths correctly. refs #2235
Signed-off-by: Daira Hopwood <daira@jacaranda.org>
parent c20a3525b7
commit 95f98e1aae
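In short: everywhere the CLI prints a local filesystem path (the start/stop commands, the backup and cp code paths, and find_shares), quote_output() is replaced by a new helper, quote_local_unicode_path(), added to allmydata.util.encodingutil. The helper requires a unicode path and, on Windows, strips the \\?\ long-path prefix (rewriting \\?\UNC\... back to \\...) before delegating to quote_output(). MissingSourceError grows a quotefn argument so local-path call sites can opt into the new quoting, and tests for quote_path and quote_local_unicode_path are added.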
@@ -2,7 +2,7 @@ import os.path, re, fnmatch
 from twisted.python import usage
 from allmydata.scripts.common import get_aliases, get_default_nodedir, \
                                      DEFAULT_ALIAS, BaseOptions
-from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_output
+from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_local_unicode_path
 
 NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")
 
@@ -368,7 +368,7 @@ class BackupOptions(FilesystemOptions):
         try:
             exclude_file = file(abs_filepath)
         except:
-            raise BackupConfigurationError('Error opening exclude file %s.' % quote_output(abs_filepath))
+            raise BackupConfigurationError('Error opening exclude file %s.' % quote_local_unicode_path(abs_filepath))
         try:
             for line in exclude_file:
                 self.opt_exclude(line)
@@ -645,7 +645,7 @@ def find_shares(options):
     /home/warner/testnet/node-2/storage/shares/44k/44kai1tui348689nrw8fjegc8c/2
     """
     from allmydata.storage.server import si_a2b, storage_index_to_dir
-    from allmydata.util.encodingutil import listdir_unicode
+    from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
 
     out = options.stdout
     sharedir = storage_index_to_dir(si_a2b(options.si_s))
@@ -653,7 +653,7 @@ def find_shares(options):
         d = os.path.join(d, "storage", "shares", sharedir)
         if os.path.exists(d):
             for shnum in listdir_unicode(d):
-                print >>out, os.path.join(d, shnum)
+                print >>out, quote_local_unicode_path(os.path.join(d, shnum), quotemarks=False)
 
     return 0
 
@@ -4,7 +4,7 @@ from allmydata.scripts.common import BasedirOptions
 from twisted.scripts import twistd
 from twisted.python import usage
 from allmydata.util import fileutil
-from allmydata.util.encodingutil import listdir_unicode, quote_output
+from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
 
 
 class StartOptions(BasedirOptions):
@@ -92,13 +92,14 @@ def identify_node_type(basedir):
 
 def start(config, out=sys.stdout, err=sys.stderr):
     basedir = config['basedir']
-    print >>out, "STARTING", quote_output(basedir)
+    quoted_basedir = quote_local_unicode_path(basedir)
+    print >>out, "STARTING", quoted_basedir
     if not os.path.isdir(basedir):
-        print >>err, "%s does not look like a directory at all" % quote_output(basedir)
+        print >>err, "%s does not look like a directory at all" % quoted_basedir
         return 1
     nodetype = identify_node_type(basedir)
     if not nodetype:
-        print >>err, "%s is not a recognizable node directory" % quote_output(basedir)
+        print >>err, "%s is not a recognizable node directory" % quoted_basedir
         return 1
     # Now prepare to turn into a twistd process. This os.chdir is the point
     # of no return.
@@ -108,7 +109,7 @@ def start(config, out=sys.stdout, err=sys.stderr):
         and "--nodaemon" not in config.twistd_args
         and "--syslog" not in config.twistd_args
         and "--logfile" not in config.twistd_args):
-        fileutil.make_dirs(os.path.join(basedir, "logs"))
+        fileutil.make_dirs(os.path.join(basedir, u"logs"))
         twistd_args.extend(["--logfile", os.path.join("logs", "twistd.log")])
     twistd_args.extend(config.twistd_args)
     twistd_args.append("StartTahoeNode") # point at our StartTahoeNodePlugin
@@ -154,17 +155,18 @@ def start(config, out=sys.stdout, err=sys.stderr):
     else:
         verb = "starting"
 
-    print >>out, "%s node in %s" % (verb, basedir)
+    print >>out, "%s node in %s" % (verb, quoted_basedir)
     twistd.runApp(twistd_config)
     # we should only reach here if --nodaemon or equivalent was used
    return 0
 
 def stop(config, out=sys.stdout, err=sys.stderr):
     basedir = config['basedir']
-    print >>out, "STOPPING", quote_output(basedir)
-    pidfile = os.path.join(basedir, "twistd.pid")
+    quoted_basedir = quote_local_unicode_path(basedir)
+    print >>out, "STOPPING", quoted_basedir
+    pidfile = os.path.join(basedir, u"twistd.pid")
     if not os.path.exists(pidfile):
-        print >>err, "%s does not look like a running node directory (no twistd.pid)" % quote_output(basedir)
+        print >>err, "%s does not look like a running node directory (no twistd.pid)" % quoted_basedir
         # we define rc=2 to mean "nothing is running, but it wasn't me who
         # stopped it"
         return 2
@@ -10,7 +10,7 @@ from allmydata.scripts.common_http import do_http, HTTPError, format_http_error
 from allmydata.util import time_format
 from allmydata.scripts import backupdb
 from allmydata.util.encodingutil import listdir_unicode, quote_output, \
-     to_str, FilenameEncodingError, unicode_to_url
+     quote_local_unicode_path, to_str, FilenameEncodingError, unicode_to_url
 from allmydata.util.assertutil import precondition
 from allmydata.util.fileutil import abspath_expanduser_unicode
 
@@ -163,7 +163,8 @@ class BackerUpper:
         precondition(isinstance(localpath, unicode), localpath)
         # returns newdircap
 
-        self.verboseprint("processing %s" % quote_output(localpath))
+        quoted_path = quote_local_unicode_path(localpath)
+        self.verboseprint("processing %s" % (quoted_path,))
         create_contents = {} # childname -> (type, rocap, metadata)
         compare_contents = {} # childname -> rocap
 
@@ -171,11 +172,11 @@ class BackerUpper:
             children = listdir_unicode(localpath)
         except EnvironmentError:
             self.directories_skipped += 1
-            self.warn("WARNING: permission denied on directory %s" % quote_output(localpath))
+            self.warn("WARNING: permission denied on directory %s" % (quoted_path,))
             children = []
         except FilenameEncodingError:
             self.directories_skipped += 1
-            self.warn("WARNING: could not list directory %s due to a filename encoding error" % quote_output(localpath))
+            self.warn("WARNING: could not list directory %s due to a filename encoding error" % (quoted_path,))
             children = []
 
         for child in self.options.filter_listdir(children):
@@ -197,17 +198,17 @@ class BackerUpper:
                     compare_contents[child] = childcap
                 except EnvironmentError:
                     self.files_skipped += 1
-                    self.warn("WARNING: permission denied on file %s" % quote_output(childpath))
+                    self.warn("WARNING: permission denied on file %s" % quote_local_unicode_path(childpath))
             else:
                 self.files_skipped += 1
                 if os.path.islink(childpath):
-                    self.warn("WARNING: cannot backup symlink %s" % quote_output(childpath))
+                    self.warn("WARNING: cannot backup symlink %s" % quote_local_unicode_path(childpath))
                 else:
-                    self.warn("WARNING: cannot backup special file %s" % quote_output(childpath))
+                    self.warn("WARNING: cannot backup special file %s" % quote_local_unicode_path(childpath))
 
         must_create, r = self.check_backupdb_directory(compare_contents)
         if must_create:
-            self.verboseprint(" creating directory for %s" % quote_output(localpath))
+            self.verboseprint(" creating directory for %s" % quote_local_unicode_path(localpath))
             newdircap = mkdir(create_contents, self.options)
             assert isinstance(newdircap, str)
             if r:
@@ -215,7 +216,7 @@ class BackerUpper:
             self.directories_created += 1
             return newdircap
         else:
-            self.verboseprint(" re-using old directory for %s" % quote_output(localpath))
+            self.verboseprint(" re-using old directory for %s" % quote_local_unicode_path(localpath))
             self.directories_reused += 1
             return r.was_created()
 
@@ -290,14 +291,14 @@ class BackerUpper:
     def upload(self, childpath):
         precondition(isinstance(childpath, unicode), childpath)
 
-        #self.verboseprint("uploading %s.." % quote_output(childpath))
+        #self.verboseprint("uploading %s.." % quote_local_unicode_path(childpath))
         metadata = get_local_metadata(childpath)
 
         # we can use the backupdb here
         must_upload, bdb_results = self.check_backupdb_file(childpath)
 
         if must_upload:
-            self.verboseprint("uploading %s.." % quote_output(childpath))
+            self.verboseprint("uploading %s.." % quote_local_unicode_path(childpath))
             infileobj = open(childpath, "rb")
             url = self.options['node-url'] + "uri"
             resp = do_http("PUT", url, infileobj)
@@ -305,7 +306,7 @@ class BackerUpper:
                 raise HTTPError("Error during file PUT", resp)
 
             filecap = resp.read().strip()
-            self.verboseprint(" %s -> %s" % (quote_output(childpath, quotemarks=False),
+            self.verboseprint(" %s -> %s" % (quote_local_unicode_path(childpath, quotemarks=False),
                                              quote_output(filecap, quotemarks=False)))
             #self.verboseprint(" metadata: %s" % (quote_output(metadata, quotemarks=False),))
 
@@ -316,7 +317,7 @@ class BackerUpper:
             return filecap, metadata
 
         else:
-            self.verboseprint("skipping %s.." % quote_output(childpath))
+            self.verboseprint("skipping %s.." % quote_local_unicode_path(childpath))
             self.files_reused += 1
             return bdb_results.was_uploaded(), metadata
 
@@ -10,13 +10,14 @@ from allmydata.scripts.common_http import do_http, HTTPError
 from allmydata import uri
 from allmydata.util import fileutil
 from allmydata.util.fileutil import abspath_expanduser_unicode
-from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, to_str
+from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, \
+     quote_local_unicode_path, to_str
 from allmydata.util.assertutil import precondition
 
 
 class MissingSourceError(TahoeError):
-    def __init__(self, name):
-        TahoeError.__init__(self, "No such file or directory %s" % quote_output(name))
+    def __init__(self, name, quotefn=quote_output):
+        TahoeError.__init__(self, "No such file or directory %s" % quotefn(name))
 
 
 def GET_to_file(url):
@@ -565,7 +566,7 @@ class Copier:
             pathname = abspath_expanduser_unicode(path.decode('utf-8'))
             name = os.path.basename(pathname)
             if not os.path.exists(pathname):
-                raise MissingSourceError(source_spec)
+                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
             if os.path.isdir(pathname):
                 t = LocalDirectorySource(self.progress, pathname)
             else:
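The quotefn keyword added to MissingSourceError keeps the default behaviour (quote_output) for Tahoe-side names, while the local-filesystem branch of Copier passes quote_local_unicode_path, so missing local sources are reported with the same Windows-aware path quoting used elsewhere in this change.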
@ -39,7 +39,7 @@ from twisted.python import usage
|
|||||||
|
|
||||||
from allmydata.util.assertutil import precondition
|
from allmydata.util.assertutil import precondition
|
||||||
from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
|
from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
|
||||||
quote_output, get_io_encoding, get_filesystem_encoding, \
|
quote_output, quote_local_unicode_path, get_io_encoding, get_filesystem_encoding, \
|
||||||
unicode_to_output, unicode_to_argv, to_str
|
unicode_to_output, unicode_to_argv, to_str
|
||||||
from allmydata.util.fileutil import abspath_expanduser_unicode
|
from allmydata.util.fileutil import abspath_expanduser_unicode
|
||||||
|
|
||||||
@ -2854,7 +2854,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
|
|||||||
def _check((rc, out, err)):
|
def _check((rc, out, err)):
|
||||||
self.failUnlessReallyEqual(rc, 2)
|
self.failUnlessReallyEqual(rc, 2)
|
||||||
foo2 = os.path.join(source, "foo2.txt")
|
foo2 = os.path.join(source, "foo2.txt")
|
||||||
self.failUnlessReallyEqual(err, "WARNING: cannot backup symlink '%s'\n" % foo2)
|
self.failUnlessIn("WARNING: cannot backup symlink ", err)
|
||||||
|
self.failUnlessIn(foo2, err)
|
||||||
|
|
||||||
fu, fr, fs, dc, dr, ds = self.count_output(out)
|
fu, fr, fs, dc, dr, ds = self.count_output(out)
|
||||||
# foo.txt
|
# foo.txt
|
||||||
|
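The exact-match assertion on the symlink warning becomes two substring checks, apparently because the path is now rendered by quote_local_unicode_path and its precise quoting can differ by platform; the test now only requires that the warning text and the offending path both appear in stderr.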
@@ -63,8 +63,9 @@ import os, sys, locale
 from allmydata.test.common_util import ReallyEqualMixin
 from allmydata.util import encodingutil
 from allmydata.util.encodingutil import argv_to_unicode, unicode_to_url, \
-    unicode_to_output, quote_output, unicode_platform, listdir_unicode, \
-    FilenameEncodingError, get_io_encoding, get_filesystem_encoding, _reload
+    unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
+    unicode_platform, listdir_unicode, FilenameEncodingError, get_io_encoding, \
+    get_filesystem_encoding, _reload
 from allmydata.dirnode import normalize
 
 from twisted.python import usage
@@ -395,6 +396,19 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
         self.test_quote_output_utf8(None)
 
 
+class QuotePaths(ReallyEqualMixin, unittest.TestCase):
+    def test_quote_path(self):
+        self.failUnlessReallyEqual(quote_path([u'foo', u'bar']), "'foo/bar'")
+        self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=True), "'foo/bar'")
+        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar']), '"foo/\\x0abar"')
+        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), '"foo/\\x0abar"')
+
+        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo"),
+                                   "'C:\\foo'" if sys.platform == "win32" else "'\\\\?\\C:\\foo'")
+        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar"),
+                                   "'\\\\foo\\bar'" if sys.platform == "win32" else "'\\\\?\\UNC\\foo\\bar'")
+
+
 class UbuntuKarmicUTF8(EncodingUtil, unittest.TestCase):
     uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
     argv = 'lumi\xc3\xa8re'
@@ -230,6 +230,16 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
 def quote_path(path, quotemarks=True):
     return quote_output("/".join(map(to_str, path)), quotemarks=quotemarks, quote_newlines=True)
 
+def quote_local_unicode_path(path, quotemarks=True):
+    precondition(isinstance(path, unicode), path)
+
+    if sys.platform == "win32" and path.startswith(u"\\\\?\\"):
+        path = path[4 :]
+        if path.startswith(u"UNC\\"):
+            path = u"\\\\" + path[4 :]
+
+    return quote_output(path, quotemarks=quotemarks, quote_newlines=True)
+
 
 def unicode_platform():
     """
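The Windows-specific prefix handling introduced above can be tried in isolation. The following is a hypothetical standalone sketch, not code from the commit: strip_longpath_prefix is an illustrative name, and the real helper delegates the actual quoting to quote_output() rather than wrapping the result in plain single quotes as done here.

# Standalone sketch (Python 2) of the win32 branch of quote_local_unicode_path().
def strip_longpath_prefix(path):
    # \\?\C:\foo         -> C:\foo
    # \\?\UNC\host\share -> \\host\share
    if path.startswith(u"\\\\?\\"):
        path = path[4:]
        if path.startswith(u"UNC\\"):
            path = u"\\\\" + path[4:]
    return path

for p in [u"\\\\?\\C:\\foo", u"\\\\?\\UNC\\foo\\bar", u"/home/user/file"]:
    # naive quoting stand-in for quote_output(), for display only
    print "'%s'" % (strip_longpath_prefix(p),)
# prints: 'C:\foo'  then  '\\foo\bar'  then  '/home/user/file'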