Update SFTP implementation and tests: fix #1038 and switch to foolscap logging; also some code reorganization.

david-sarah 2010-05-13 21:31:13 -07:00
parent 5fdd63d0ad
commit 14d0721246
2 changed files with 377 additions and 286 deletions


@@ -1,6 +1,7 @@
-import os, tempfile, heapq, binascii, traceback, sys
+import os, tempfile, heapq, binascii, traceback, array, stat
from stat import S_IFREG, S_IFDIR
+from time import time, strftime, localtime
from zope.interface import implements
from twisted.python import components
@@ -17,7 +18,6 @@ from twisted.conch.openssh_compat import primes
from twisted.cred import portal
from twisted.internet import defer
-from twisted.python.failure import Failure
from twisted.internet.interfaces import IFinishableConsumer
from foolscap.api import eventually
from allmydata.util import deferredutil
@@ -37,20 +37,37 @@ warnings.filterwarnings("ignore", category=DeprecationWarning,
message="BaseException.message has been deprecated as of Python 2.6",
module=".*filetransfer", append=True)
-debug = False
+noisy = True
+use_foolscap_logging = True
-if debug:
+if use_foolscap_logging:
+from allmydata.util.log import msg as logmsg, err as logerr, \
+NOISY, OPERATIONAL, SCARY, PrefixingLogMixin
+else:
+def logmsg(s, level=None):
+print s
+def logerr(s, level=None):
+print s
+NOISY = None
+OPERATIONAL = None
+SCARY = None
+class PrefixingLogMixin:
+def __init__(self, facility=None):
+pass
+def log(self, s, level=None):
+print s
+if noisy:
def eventually_callback(d):
s = traceback.format_stack()
def _cb(res):
try:
-print "CALLBACK %r %r" % (d, res)
+if noisy: logmsg("CALLBACK %r %r" % (d, res), level=NOISY)
d.callback(res)
except: # pragma: no cover
-print "Failed to callback %r" % (d,)
-print "with %r" % (res,)
-print "Original stack:"
-print '!' + '!'.join(s)
+logerr("Failed to callback %r with %r\n"
+"Original stack:\n!%s" %
+(d, res, '!'.join(s)), level=SCARY)
traceback.print_exc()
raise
return lambda res: eventually(_cb, res)
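The wrappers above hand results to foolscap's eventually(), so Deferreds fire on a later reactor turn rather than synchronously. A minimal sketch of the idea, assuming a running Twisted reactor (illustrative, not part of the patch):

    from twisted.internet import defer
    from foolscap.api import eventually

    d = defer.Deferred()
    d.addCallback(lambda res: res.upper())
    # Schedule the callback for a later reactor turn instead of firing it
    # synchronously; callbacks added before that turn still run in order.
    eventually(d.callback, "result")   # roughly what eventually_callback(d)("result") does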
@@ -59,13 +76,12 @@ if debug:
s = traceback.format_stack()
def _eb(err):
try:
-print "ERRBACK", d, err
+if noisy: logmsg("ERRBACK %r %r" % (d, err), level=NOISY)
d.errback(err)
except: # pragma: no cover
-print "Failed to errback %r" % (d,)
-print "with %r" % (err,)
-print "Original stack:"
-print '!' + '!'.join(s)
+logerr("Failed to errback %r with %r\n"
+"Original stack:\n!%s" %
+(d, err, '!'.join(s)), level=SCARY)
traceback.print_exc()
raise
return lambda err: eventually(_eb, err)
@@ -80,9 +96,8 @@ else:
def _raise_error(err):
if err is None:
return None
-if debug:
-print "TRACEBACK %r" % (err,)
-#traceback.print_exc(err)
+if noisy: logmsg("RAISE %r" % (err,), level=NOISY)
+#traceback.print_exc(err)
# The message argument to SFTPError must not reveal information that
# might compromise anonymity.
@@ -93,11 +108,8 @@ def _raise_error(err):
if err.check(NoSuchChildError):
childname = err.value.args[0].encode('utf-8')
raise SFTPError(FX_NO_SUCH_FILE, childname)
-if err.check(ExistingChildError):
-msg = err.value.args[0].encode('utf-8')
+if err.check(ExistingChildError) or err.check(NotWriteableError):
# later versions of SFTP define FX_FILE_ALREADY_EXISTS, but version 3 doesn't
-raise SFTPError(FX_PERMISSION_DENIED, msg)
-if err.check(NotWriteableError):
msg = err.value.args[0].encode('utf-8')
raise SFTPError(FX_PERMISSION_DENIED, msg)
if err.check(NotImplementedError):
@@ -121,6 +133,64 @@ def _repr_flags(flags):
]
if f])
+def _lsLine(name, attrs):
+st_uid = "tahoe"
+st_gid = "tahoe"
+st_mtime = attrs.get("mtime", 0)
+st_mode = attrs["permissions"]
+# TODO: check that clients are okay with this being a "?".
+# (They should be because the longname is intended for human
+# consumption.)
+st_size = attrs.get("size", "?")
+# We don't know how many links there really are to this object.
+st_nlink = 1
+# From <http://twistedmatrix.com/trac/browser/trunk/twisted/conch/ls.py?rev=25412>.
+# We can't call the version in Twisted because we might have a version earlier than
+# <http://twistedmatrix.com/trac/changeset/25412> (released in Twisted 8.2).
+mode = st_mode
+perms = array.array('c', '-'*10)
+ft = stat.S_IFMT(mode)
+if stat.S_ISDIR(ft): perms[0] = 'd'
+elif stat.S_ISCHR(ft): perms[0] = 'c'
+elif stat.S_ISBLK(ft): perms[0] = 'b'
+elif stat.S_ISREG(ft): perms[0] = '-'
+elif stat.S_ISFIFO(ft): perms[0] = 'f'
+elif stat.S_ISLNK(ft): perms[0] = 'l'
+elif stat.S_ISSOCK(ft): perms[0] = 's'
+else: perms[0] = '?'
+# user
+if mode&stat.S_IRUSR:perms[1] = 'r'
+if mode&stat.S_IWUSR:perms[2] = 'w'
+if mode&stat.S_IXUSR:perms[3] = 'x'
+# group
+if mode&stat.S_IRGRP:perms[4] = 'r'
+if mode&stat.S_IWGRP:perms[5] = 'w'
+if mode&stat.S_IXGRP:perms[6] = 'x'
+# other
+if mode&stat.S_IROTH:perms[7] = 'r'
+if mode&stat.S_IWOTH:perms[8] = 'w'
+if mode&stat.S_IXOTH:perms[9] = 'x'
+# suid/sgid never set
+l = perms.tostring()
+l += str(st_nlink).rjust(5) + ' '
+un = str(st_uid)
+l += un.ljust(9)
+gr = str(st_gid)
+l += gr.ljust(9)
+sz = str(st_size)
+l += sz.rjust(8)
+l += ' '
+sixmo = 60 * 60 * 24 * 7 * 26
+if st_mtime + sixmo < time(): # last edited more than 6mo ago
+l += strftime("%b %d %Y ", localtime(st_mtime))
+else:
+l += strftime("%b %d %H:%M ", localtime(st_mtime))
+l += name
+return l
def _populate_attrs(childnode, metadata, writeable, size=None):
attrs = {}
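The _lsLine helper added above (moved up from the bottom of the module, where it was called lsLine) builds the 'ls -l' style longname that openDirectory returns for each entry. A rough usage sketch; the import location is an assumption and the date field depends on the local timezone:

    from stat import S_IFREG
    from allmydata.frontends.sftpd import _lsLine   # assumed module path

    # Produces a line shaped like:
    #   -rw-rw----    1 tahoe    tahoe          10 May 14 04:31 small
    print _lsLine("small", {"permissions": S_IFREG | 0660, "size": 10, "mtime": 1273811473})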
@@ -179,10 +249,14 @@ def _populate_attrs(childnode, metadata, writeable, size=None):
return attrs
-class EncryptedTemporaryFile:
+class EncryptedTemporaryFile(PrefixingLogMixin):
# not implemented: next, readline, readlines, xreadlines, writelines
+def __init__(self):
+PrefixingLogMixin.__init__(self, facility="tahoe.sftp")
+self.file = tempfile.TemporaryFile()
+self.key = os.urandom(16) # AES-128
def _crypt(self, offset, data):
# FIXME: use random-access AES (pycryptopp ticket #18)
offset_big = offset // 16
@@ -192,10 +266,6 @@ class EncryptedTemporaryFile:
cipher.process("\x00"*offset_small)
return cipher.process(data)
-def __init__(self):
-self.file = tempfile.TemporaryFile()
-self.key = os.urandom(16) # AES-128
def close(self):
self.file.close()
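EncryptedTemporaryFile keeps the ordinary file-object interface while encrypting what actually reaches the backing TemporaryFile, so callers can treat it as a plain scratch file. An illustrative round trip, assuming pycryptopp is installed and the class is imported from this module (not part of the patch):

    f = EncryptedTemporaryFile()
    f.write("hello world")        # stored encrypted in the backing temp file
    f.seek(0)
    assert f.read(5) == "hello"   # decrypted transparently on the way back out
    f.close()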
@@ -203,33 +273,33 @@ class EncryptedTemporaryFile:
self.file.flush()
def seek(self, offset, whence=os.SEEK_SET):
-if debug: print ".seek(%r, %r)" % (offset, whence)
+if noisy: self.log(".seek(%r, %r)" % (offset, whence), level=NOISY)
self.file.seek(offset, whence)
def tell(self):
offset = self.file.tell()
-if debug: print ".offset = %r" % (offset,)
+if noisy: self.log(".tell() = %r" % (offset,), level=NOISY)
return offset
def read(self, size=-1):
-if debug: print ".read(%r)" % (size,)
+if noisy: self.log(".read(%r)" % (size,), level=NOISY)
index = self.file.tell()
ciphertext = self.file.read(size)
plaintext = self._crypt(index, ciphertext)
return plaintext
def write(self, plaintext):
-if debug: print ".write(%r)" % (plaintext,)
+if noisy: self.log(".write(%r)" % (plaintext,), level=NOISY)
index = self.file.tell()
ciphertext = self._crypt(index, plaintext)
self.file.write(ciphertext)
def truncate(self, newsize):
-if debug: print ".truncate(%r)" % (newsize,)
+if noisy: self.log(".truncate(%r)" % (newsize,), level=NOISY)
self.file.truncate(newsize)
-class OverwriteableFileConsumer:
+class OverwriteableFileConsumer(PrefixingLogMixin):
implements(IFinishableConsumer)
"""I act both as a consumer for the download of the original file contents, and as a
wrapper for a temporary file that records the downloaded data and any overwrites.
@@ -248,6 +318,7 @@ class OverwriteableFileConsumer:
useful for other frontends."""
def __init__(self, check_abort, download_size, tempfile_maker):
+PrefixingLogMixin.__init__(self, facility="tahoe.sftp")
self.check_abort = check_abort
self.download_size = download_size
self.current_size = download_size
@@ -265,7 +336,8 @@ class OverwriteableFileConsumer:
return self.current_size
def set_current_size(self, size):
-if debug: print "set_current_size(%r), current_size = %r, downloaded = %r" % (size, self.current_size, self.downloaded)
+if noisy: self.log(".set_current_size(%r), current_size = %r, downloaded = %r" %
+(size, self.current_size, self.downloaded), level=NOISY)
if size < self.current_size or size < self.downloaded:
self.f.truncate(size)
self.current_size = size
@@ -284,7 +356,7 @@ class OverwriteableFileConsumer:
p.resumeProducing()
def write(self, data):
-if debug: print "write(%r)" % (data,)
+if noisy: self.log(".write(%r)" % (data,), level=NOISY)
if self.check_abort():
self.close()
return
@@ -344,7 +416,7 @@ class OverwriteableFileConsumer:
(next, d) = self.milestones[0]
if next > milestone:
return
-if debug: print "MILESTONE %r %r" % (next, d)
+if noisy: self.log("MILESTONE %r %r" % (next, d), level=NOISY)
heapq.heappop(self.milestones)
eventually_callback(d)(None)
@@ -352,7 +424,7 @@ class OverwriteableFileConsumer:
self.finish()
def overwrite(self, offset, data):
-if debug: print "overwrite(%r, %r)" % (offset, data)
+if noisy: self.log(".overwrite(%r, %r)" % (offset, data), level=NOISY)
if offset > self.download_size and offset > self.current_size:
# Normally writing at an offset beyond the current end-of-file
# would leave a hole that appears filled with zeroes. However, an
@@ -376,7 +448,7 @@ class OverwriteableFileConsumer:
Otherwise errback the Deferred that we return.
The caller must perform no more overwrites until the Deferred has fired."""
-if debug: print "read(%r, %r), current_size = %r" % (offset, length, self.current_size)
+if noisy: self.log(".read(%r, %r), current_size = %r" % (offset, length, self.current_size), level=NOISY)
if offset >= self.current_size:
def _eof(): raise EOFError("read past end of file")
return defer.execute(_eof)
@@ -392,20 +464,20 @@ class OverwriteableFileConsumer:
# then extended.
assert self.current_size >= offset + length, (self.current_size, offset, length)
-if debug: print "!!! %r" % (self.f,)
+if noisy: self.log("self.f = %r" % (self.f,), level=NOISY)
self.f.seek(offset)
return self.f.read(length)
d.addCallback(_reached)
return d
def when_reached(self, index):
-if debug: print "when_reached(%r)" % (index,)
+if noisy: self.log(".when_reached(%r)" % (index,), level=NOISY)
if index <= self.downloaded: # already reached
-if debug: print "already reached %r" % (index,)
+if noisy: self.log("already reached %r" % (index,), level=NOISY)
return defer.succeed(None)
d = defer.Deferred()
def _reached(ign):
-if debug: print "reached %r" % (index,)
+if noisy: self.log("reached %r" % (index,), level=NOISY)
return ign
d.addCallback(_reached)
heapq.heappush(self.milestones, (index, d))
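when_reached and the milestone heap implement a simple rendezvous: a reader parks a Deferred keyed by the byte offset it needs, and the download side fires every parked Deferred once that offset has been written. A self-contained restatement of the pattern with illustrative names (not the class's actual API):

    import heapq
    from twisted.internet import defer

    class MilestoneTracker:
        def __init__(self):
            self.downloaded = 0
            self.milestones = []   # heap of (offset, Deferred)

        def when_reached(self, offset):
            # Return a Deferred that fires once 'offset' bytes are available.
            if offset <= self.downloaded:
                return defer.succeed(None)
            d = defer.Deferred()
            heapq.heappush(self.milestones, (offset, d))
            return d

        def advance(self, new_downloaded):
            # Called by the download side; fire every milestone now satisfied.
            self.downloaded = new_downloaded
            while self.milestones and self.milestones[0][0] <= self.downloaded:
                (_, d) = heapq.heappop(self.milestones)
                d.callback(None)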
@@ -417,7 +489,7 @@ class OverwriteableFileConsumer:
def finish(self):
while len(self.milestones) > 0:
(next, d) = self.milestones[0]
-if debug: print "MILESTONE FINISH %r %r" % (next, d)
+if noisy: self.log("MILESTONE FINISH %r %r" % (next, d), level=NOISY)
heapq.heappop(self.milestones)
# The callback means that the milestone has been reached if
# it is ever going to be. Note that the file may have been
@@ -440,6 +512,9 @@ class OverwriteableFileConsumer:
SIZE_THRESHOLD = 1000
def _make_sftp_file(check_abort, flags, convergence, parent=None, childname=None, filenode=None, metadata=None):
+if noisy: logmsg("_make_sftp_file(%r, %r, %r, parent=%r, childname=%r, filenode=%r, metadata=%r" %
+(check_abort, flags, convergence, parent, childname, filenode, metadata), NOISY)
if not (flags & (FXF_WRITE | FXF_CREAT)) and (flags & FXF_READ) and filenode and \
not filenode.is_mutable() and filenode.get_size() <= SIZE_THRESHOLD:
return ShortReadOnlySFTPFile(filenode, metadata)
@@ -448,13 +523,16 @@ def _make_sftp_file(check_abort, flags, convergence, parent=None, childname=None
parent=parent, childname=childname, filenode=filenode, metadata=metadata)
-class ShortReadOnlySFTPFile:
+class ShortReadOnlySFTPFile(PrefixingLogMixin):
implements(ISFTPFile)
"""I represent a file handle to a particular file on an SFTP connection.
I am used only for short immutable files opened in read-only mode.
The file contents are downloaded to memory when I am created."""
def __init__(self, filenode, metadata):
+PrefixingLogMixin.__init__(self, facility="tahoe.sftp")
+if noisy: self.log(".__init__(%r, %r)" % (filenode, metadata), level=NOISY)
assert IFileNode.providedBy(filenode), filenode
self.filenode = filenode
self.metadata = metadata
@@ -462,13 +540,15 @@ class ShortReadOnlySFTPFile:
self.closed = False
def readChunk(self, offset, length):
+self.log(".readChunk(%r, %r)" % (offset, length), level=OPERATIONAL)
if self.closed:
def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
return defer.execute(_closed)
d = defer.Deferred()
def _read(data):
-if debug: print "_read(%r) in readChunk(%r, %r)" % (data, offset, length)
+if noisy: self.log("_read(%r) in readChunk(%r, %r)" % (data, offset, length), level=NOISY)
# "In response to this request, the server will read as many bytes as it
# can from the file (up to 'len'), and return them in a SSH_FXP_DATA
@@ -488,15 +568,20 @@ class ShortReadOnlySFTPFile:
return d
def writeChunk(self, offset, data):
+self.log(".writeChunk(%r, <data of length %r>)" % (offset, len(data)), level=OPERATIONAL)
def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
return defer.execute(_denied)
def close(self):
+self.log(".close()", level=OPERATIONAL)
self.closed = True
return defer.succeed(None)
def getAttrs(self):
-if debug: print "GETATTRS(file)"
+self.log(".getAttrs()", level=OPERATIONAL)
if self.closed:
def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
return defer.execute(_closed)
@@ -504,12 +589,12 @@ class ShortReadOnlySFTPFile:
return defer.succeed(_populate_attrs(self.filenode, self.metadata, False))
def setAttrs(self, attrs):
-if debug: print "SETATTRS(file) %r" % (attrs,)
+self.log(".setAttrs(%r)" % (attrs,), level=OPERATIONAL)
def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
return defer.execute(_denied)
-class GeneralSFTPFile:
+class GeneralSFTPFile(PrefixingLogMixin):
implements(ISFTPFile)
"""I represent a file handle to a particular file on an SFTP connection.
I wrap an instance of OverwriteableFileConsumer, which is responsible for
@@ -519,6 +604,10 @@ class GeneralSFTPFile:
implemented by the callback chain of self.async."""
def __init__(self, check_abort, flags, convergence, parent=None, childname=None, filenode=None, metadata=None):
+PrefixingLogMixin.__init__(self, facility="tahoe.sftp")
+if noisy: self.log(".__init__(%r, %r, %r, parent=%r, childname=%r, filenode=%r, metadata=%r)" %
+(check_abort, flags, convergence, parent, childname, filenode, metadata), level=NOISY)
self.check_abort = check_abort
self.flags = flags
self.convergence = convergence
@@ -532,12 +621,12 @@ class GeneralSFTPFile:
# self.consumer should only be relied on in callbacks for self.async, since it might
# not be set before then.
self.consumer = None
+tempfile_maker = EncryptedTemporaryFile
if (flags & FXF_TRUNC) or not filenode:
# We're either truncating or creating the file, so we don't need the old contents.
assert flags & FXF_CREAT, flags
-self.consumer = OverwriteableFileConsumer(self.check_abort, 0,
-tempfile_maker=EncryptedTemporaryFile)
+self.consumer = OverwriteableFileConsumer(self.check_abort, 0, tempfile_maker)
self.consumer.finish()
else:
assert IFileNode.providedBy(filenode), filenode
@@ -546,8 +635,7 @@ class GeneralSFTPFile:
if filenode.is_mutable():
self.async.addCallback(lambda ign: filenode.download_best_version())
def _downloaded(data):
-self.consumer = OverwriteableFileConsumer(self.check_abort, len(data),
-tempfile_maker=tempfile.TemporaryFile)
+self.consumer = OverwriteableFileConsumer(self.check_abort, len(data), tempfile_maker)
self.consumer.write(data)
self.consumer.finish()
return None
@@ -555,14 +643,15 @@ class GeneralSFTPFile:
else:
download_size = filenode.get_size()
assert download_size is not None
-self.consumer = OverwriteableFileConsumer(self.check_abort, download_size,
-tempfile_maker=tempfile.TemporaryFile)
+self.consumer = OverwriteableFileConsumer(self.check_abort, download_size, tempfile_maker)
self.async.addCallback(lambda ign: filenode.read(self.consumer, 0, None))
def readChunk(self, offset, length):
+self.log(".readChunk(%r, %r)" % (offset, length), level=OPERATIONAL)
if not (self.flags & FXF_READ):
-return defer.fail(SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading"))
+def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading")
+return defer.execute(_denied)
if self.closed:
def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
@@ -570,7 +659,7 @@ class GeneralSFTPFile:
d = defer.Deferred()
def _read(ign):
-if debug: print "_read in readChunk(%r, %r)" % (offset, length)
+if noisy: self.log("_read in readChunk(%r, %r)" % (offset, length), level=NOISY)
d2 = self.consumer.read(offset, length)
d2.addErrback(_raise_error)
d2.addCallbacks(eventually_callback(d), eventually_errback(d))
@@ -580,7 +669,8 @@ class GeneralSFTPFile:
return d
def writeChunk(self, offset, data):
-if debug: print "writeChunk(%r, %r)" % (offset, data)
+self.log(".writeChunk(%r, <data of length %r>)" % (offset, len(data)), level=OPERATIONAL)
if not (self.flags & FXF_WRITE):
def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
return defer.execute(_denied)
@@ -611,6 +701,8 @@ class GeneralSFTPFile:
return defer.succeed(None)
def close(self):
+self.log(".close()", level=OPERATIONAL)
if self.closed:
return defer.succeed(None)
@@ -626,8 +718,13 @@ class GeneralSFTPFile:
d2.addCallback(lambda ign: self.consumer.get_current_size())
d2.addCallback(lambda size: self.consumer.read(0, size))
d2.addCallback(lambda new_contents: self.filenode.overwrite(new_contents))
+#elif (self.flags & FXF_EXCL) and self.consumer.get_current_size() == 0:
+# # The file will already have been written by the open call, so we can
+# # optimize out the extra directory write (useful for zero-length lockfiles).
+# pass
else:
def _add_file(ign):
+self.log("_add_file childname=%r" % (self.childname,), level=OPERATIONAL)
u = FileHandle(self.consumer.get_file(), self.convergence)
return self.parent.add_file(self.childname, u)
d2.addCallback(_add_file)
@@ -641,7 +738,7 @@ class GeneralSFTPFile:
return d
def getAttrs(self):
-if debug: print "GETATTRS(file)"
+self.log(".getAttrs()", level=OPERATIONAL)
if self.closed:
def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
@@ -663,7 +760,8 @@ class GeneralSFTPFile:
return d
def setAttrs(self, attrs):
-if debug: print "SETATTRS(file) %r" % (attrs,)
+self.log(".setAttrs(attrs) %r" % (attrs,), level=OPERATIONAL)
if not (self.flags & FXF_WRITE):
def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
return defer.execute(_denied)
@@ -688,18 +786,6 @@ class GeneralSFTPFile:
self.async.addCallbacks(_resize, eventually_errback(d))
return d
-class SFTPUser(ConchUser):
-def __init__(self, check_abort, client, rootnode, username, convergence):
-ConchUser.__init__(self)
-self.channelLookup["session"] = session.SSHSession
-self.subsystemLookup["sftp"] = FileTransferServer
-self.check_abort = check_abort
-self.client = client
-self.root = rootnode
-self.username = username
-self.convergence = convergence
class StoppableList:
def __init__(self, items):
self.items = items
@@ -709,74 +795,13 @@ class StoppableList:
def close(self):
pass
-import array
-import stat
-from time import time, strftime, localtime
-def lsLine(name, attrs):
-st_uid = "tahoe"
-st_gid = "tahoe"
-st_mtime = attrs.get("mtime", 0)
-st_mode = attrs["permissions"]
-# TODO: check that clients are okay with this being a "?".
-# (They should be because the longname is intended for human
-# consumption.)
-st_size = attrs.get("size", "?")
-# We don't know how many links there really are to this object.
-st_nlink = 1
-# From <http://twistedmatrix.com/trac/browser/trunk/twisted/conch/ls.py?rev=25412>.
-# We can't call the version in Twisted because we might have a version earlier than
-# <http://twistedmatrix.com/trac/changeset/25412> (released in Twisted 8.2).
-mode = st_mode
-perms = array.array('c', '-'*10)
-ft = stat.S_IFMT(mode)
-if stat.S_ISDIR(ft): perms[0] = 'd'
-elif stat.S_ISCHR(ft): perms[0] = 'c'
-elif stat.S_ISBLK(ft): perms[0] = 'b'
-elif stat.S_ISREG(ft): perms[0] = '-'
-elif stat.S_ISFIFO(ft): perms[0] = 'f'
-elif stat.S_ISLNK(ft): perms[0] = 'l'
-elif stat.S_ISSOCK(ft): perms[0] = 's'
-else: perms[0] = '?'
-# user
-if mode&stat.S_IRUSR:perms[1] = 'r'
-if mode&stat.S_IWUSR:perms[2] = 'w'
-if mode&stat.S_IXUSR:perms[3] = 'x'
-# group
-if mode&stat.S_IRGRP:perms[4] = 'r'
-if mode&stat.S_IWGRP:perms[5] = 'w'
-if mode&stat.S_IXGRP:perms[6] = 'x'
-# other
-if mode&stat.S_IROTH:perms[7] = 'r'
-if mode&stat.S_IWOTH:perms[8] = 'w'
-if mode&stat.S_IXOTH:perms[9] = 'x'
-# suid/sgid never set
-l = perms.tostring()
-l += str(st_nlink).rjust(5) + ' '
-un = str(st_uid)
-l += un.ljust(9)
-gr = str(st_gid)
-l += gr.ljust(9)
-sz = str(st_size)
-l += sz.rjust(8)
-l += ' '
-sixmo = 60 * 60 * 24 * 7 * 26
-if st_mtime + sixmo < time(): # last edited more than 6mo ago
-l += strftime("%b %d %Y ", localtime(st_mtime))
-else:
-l += strftime("%b %d %H:%M ", localtime(st_mtime))
-l += name
-return l
-class SFTPHandler:
+class SFTPHandler(PrefixingLogMixin):
implements(ISFTPServer)
def __init__(self, user):
-if debug: print "Creating SFTPHandler from", user
+PrefixingLogMixin.__init__(self, facility="tahoe.sftp")
+if noisy: self.log(".__init__(%r)" % (user,), level=NOISY)
self.check_abort = user.check_abort
self.client = user.client
self.root = user.root
@@ -784,13 +809,13 @@ class SFTPHandler:
self.convergence = user.convergence
def gotVersion(self, otherVersion, extData):
-if debug: print "GOTVERSION %r %r" % (otherVersion, extData)
+self.log(".gotVersion(%r, %r)" % (otherVersion, extData), level=OPERATIONAL)
return {}
def openFile(self, pathstring, flags, attrs):
-if debug: print "OPENFILE %r %r %r %r" % (pathstring, flags, _repr_flags(flags), attrs)
+self.log(".openFile(%r, %r = %r, %r)" % (pathstring, flags, _repr_flags(flags), attrs), level=OPERATIONAL)
-# this is used for both reading and writing.
+# This is used for both reading and writing.
# First exclude invalid combinations of flags.
# /usr/bin/sftp 'get' gives us FXF_READ, while 'put' on a new file
@@ -847,6 +872,7 @@ class SFTPHandler:
"Upgrading the gateway to a later Tahoe-LAFS version may help")
if not path:
# case 1
+if noisy: self.log("case 1: root = %r, path[:-1] = %r" % (root, path[:-1]), level=NOISY)
if not IFileNode.providedBy(root):
raise SFTPError(FX_PERMISSION_DENIED,
"cannot open a directory cap")
@@ -861,10 +887,11 @@ class SFTPHandler:
else:
# case 2
childname = path[-1]
-if debug: print "case 2: childname = %r, path[:-1] = %r" % (childname, path[:-1])
+if noisy: self.log("case 2: root = %r, childname = %r, path[:-1] = %r" %
+(root, childname, path[:-1]), level=NOISY)
d2 = root.get_child_at_path(path[:-1])
def _got_parent(parent):
-if debug: print "_got_parent(%r)" % (parent,)
+if noisy: self.log("_got_parent(%r)" % (parent,), level=NOISY)
stash['parent'] = parent
if flags & FXF_EXCL:
@@ -881,7 +908,8 @@ class SFTPHandler:
# 'overwrite=False' ensures failure if the link already exists.
# FIXME: should use a single call to set_uri and return (child, metadata) (#1035)
-d3 = parent.set_uri(childname, None, "URI:LIT:", overwrite=False)
+zero_length_lit = "URI:LIT:"
+d3 = parent.set_uri(childname, None, zero_length_lit, overwrite=False)
def _seturi_done(child):
stash['child'] = child
return parent.get_metadata_for(childname)
@@ -889,12 +917,12 @@ class SFTPHandler:
d3.addCallback(lambda metadata: (stash['child'], metadata))
return d3
else:
-if debug: print "get_child_and_metadata"
+if noisy: self.log("get_child_and_metadata(%r)" % (childname,), level=NOISY)
return parent.get_child_and_metadata(childname)
d2.addCallback(_got_parent)
def _got_child( (filenode, metadata) ):
-if debug: print "_got_child((%r, %r))" % (filenode, metadata)
+if noisy: self.log("_got_child( (%r, %r) )" % (filenode, metadata), level=NOISY)
parent = stash['parent']
if filenode.is_unknown():
raise SFTPError(FX_PERMISSION_DENIED,
@@ -913,7 +941,7 @@ class SFTPHandler:
return _make_sftp_file(self.check_abort, flags, self.convergence, parent=parent,
childname=childname, filenode=filenode, metadata=metadata)
def _no_child(f):
-if debug: print "_no_child(%r)" % (f,)
+if noisy: self.log("_no_child(%r)" % (f,), level=NOISY)
f.trap(NoSuchChildError)
parent = stash['parent']
if parent is None:
@@ -934,12 +962,14 @@ class SFTPHandler:
return d
def removeFile(self, pathstring):
-if debug: print "REMOVEFILE %r" % (pathstring,)
+self.log(".removeFile(%r)" % (pathstring,), level=OPERATIONAL)
path = self._path_from_string(pathstring)
return self._remove_object(path, must_be_file=True)
def renameFile(self, oldpathstring, newpathstring):
-if debug: print "RENAMEFILE %r %r" % (oldpathstring, newpathstring)
+self.log(".renameFile(%r, %r)" % (oldpathstring, newpathstring), level=OPERATIONAL)
fromPath = self._path_from_string(oldpathstring)
toPath = self._path_from_string(newpathstring)
@@ -947,6 +977,8 @@ class SFTPHandler:
d = deferredutil.gatherResults([self._get_parent(fromPath),
self._get_parent(toPath)])
def _got( (fromPair, toPair) ):
+if noisy: self.log("_got( (%r, %r) ) in .renameFile(%r, %r)" %
+(fromPair, toPair, oldpathstring, newpathstring), level=NOISY)
(fromParent, fromChildname) = fromPair
(toParent, toChildname) = toPair
@@ -963,11 +995,12 @@ class SFTPHandler:
return d
def makeDirectory(self, pathstring, attrs):
-if debug: print "MAKEDIRECTORY %r %r" % (pathstring, attrs)
+self.log(".makeDirectory(%r, %r)" % (pathstring, attrs), level=OPERATIONAL)
path = self._path_from_string(pathstring)
metadata = self._attrs_to_metadata(attrs)
d = self._get_root(path)
-d.addCallback(lambda (root,path):
+d.addCallback(lambda (root, path):
self._get_or_create_directories(root, path, metadata))
d.addErrback(_raise_error)
return d
@@ -991,7 +1024,8 @@ class SFTPHandler:
return d
def removeDirectory(self, pathstring):
-if debug: print "REMOVEDIRECTORY %r" % (pathstring,)
+self.log(".removeDirectory(%r)" % (pathstring,), level=OPERATIONAL)
path = self._path_from_string(pathstring)
return self._remove_object(path, must_be_directory=True)
@@ -1013,9 +1047,9 @@ class SFTPHandler:
return d
def openDirectory(self, pathstring):
-if debug: print "OPENDIRECTORY %r" % (pathstring,)
+self.log(".openDirectory(%r)" % (pathstring,), level=OPERATIONAL)
path = self._path_from_string(pathstring)
-if debug: print " PATH %r" % (path,)
d = self._get_node_and_metadata_for_path(path)
def _list( (dirnode, metadata) ):
if dirnode.is_unknown():
@@ -1035,7 +1069,7 @@ class SFTPHandler:
not (node.is_mutable() and node.is_readonly()))
attrs = _populate_attrs(node, metadata, writeable)
filename_utf8 = filename.encode('utf-8')
-longname = lsLine(filename_utf8, attrs)
+longname = _lsLine(filename_utf8, attrs)
results.append( (filename_utf8, longname, attrs) )
return StoppableList(results)
d2.addCallback(_render)
@@ -1045,7 +1079,8 @@ class SFTPHandler:
return d
def getAttrs(self, pathstring, followLinks):
-if debug: print "GETATTRS %r %r" % (pathstring, followLinks)
+self.log(".getAttrs(%r, followLinks=%r)" % (pathstring, followLinks), level=OPERATIONAL)
d = self._get_node_and_metadata_for_path(self._path_from_string(pathstring))
def _render( (node, metadata) ):
# When asked about a specific file, report its current size.
@@ -1061,14 +1096,11 @@ class SFTPHandler:
return d2
d.addCallback(_render)
d.addErrback(_raise_error)
-def _done(res):
-if debug: print " DONE %r" % (res,)
-return res
-d.addBoth(_done)
return d
def setAttrs(self, pathstring, attrs):
-if debug: print "SETATTRS %r %r" % (pathstring, attrs)
+self.log(".setAttrs(%r, %r)" % (pathstring, attrs), level=OPERATIONAL)
if "size" in attrs:
# this would require us to download and re-upload the truncated/extended
# file contents
@@ -1076,26 +1108,31 @@ class SFTPHandler:
return None
def readLink(self, pathstring):
-if debug: print "READLINK %r" % (pathstring,)
+self.log(".readLink(%r)" % (pathstring,), level=OPERATIONAL)
raise SFTPError(FX_OP_UNSUPPORTED, "readLink")
def makeLink(self, linkPathstring, targetPathstring):
-if debug: print "MAKELINK %r %r" % (linkPathstring, targetPathstring)
+self.log(".makeLink(%r, %r)" % (linkPathstring, targetPathstring), level=OPERATIONAL)
raise SFTPError(FX_OP_UNSUPPORTED, "makeLink")
def extendedRequest(self, extendedName, extendedData):
-if debug: print "EXTENDEDREQUEST %r %r" % (extendedName, extendedData)
+self.log(".extendedRequest(%r, %r)" % (extendedName, extendedData), level=OPERATIONAL)
-# Client 'df' command requires 'statvfs@openssh.com' extension
-# (but there's little point to implementing that since we would only
-# have faked values to report).
+# A client 'df' command requires the 'statvfs@openssh.com' extension,
+# but there's little point to implementing that since we would only
+# have faked values to report.
raise SFTPError(FX_OP_UNSUPPORTED, "extendedRequest %r" % extendedName)
def realPath(self, pathstring):
-if debug: print "REALPATH %r" % (pathstring,)
+self.log(".realPath(%r)" % (pathstring,), level=OPERATIONAL)
-return "/" + "/".join(self._path_from_string(pathstring))
+path_utf8 = [p.encode('utf-8') for p in self._path_from_string(pathstring)]
+return "/" + "/".join(path_utf8)
def _path_from_string(self, pathstring):
-if debug: print "CONVERT %r" % (pathstring,)
+if noisy: self.log("CONVERT %r" % (pathstring,), level=NOISY)
# The home directory is the root directory.
pathstring = pathstring.strip("/")
@@ -1120,29 +1157,27 @@ class SFTPHandler:
raise SFTPError(FX_NO_SUCH_FILE, "path could not be decoded as UTF-8")
path.append(p)
-if debug: print " PATH %r" % (path,)
+if noisy: self.log(" PATH %r" % (path,), level=NOISY)
return path
def _get_node_and_metadata_for_path(self, path):
d = self._get_root(path)
def _got_root( (root, path) ):
-if debug: print " ROOT %r" % (root,)
-if debug: print " PATH %r" % (path,)
+if noisy: self.log("_got_root( (%r, %r) )" % (root, path), level=NOISY)
if path:
return root.get_child_and_metadata_at_path(path)
else:
-return (root,{})
+return (root, {})
d.addCallback(_got_root)
return d
def _get_root(self, path):
# return (root, remaining_path)
if path and path[0] == u"uri":
-d = defer.maybeDeferred(self.client.create_node_from_uri,
-str(path[1]))
+d = defer.maybeDeferred(self.client.create_node_from_uri, path[1].encode('utf-8'))
d.addCallback(lambda root: (root, path[2:]))
else:
-d = defer.succeed((self.root,path))
+d = defer.succeed((self.root, path))
return d
def _get_parent(self, path):
@@ -1176,6 +1211,18 @@ class SFTPHandler:
return metadata
+class SFTPUser(ConchUser):
+def __init__(self, check_abort, client, rootnode, username, convergence):
+ConchUser.__init__(self)
+self.channelLookup["session"] = session.SSHSession
+self.subsystemLookup["sftp"] = FileTransferServer
+self.check_abort = check_abort
+self.client = client
+self.root = rootnode
+self.username = username
+self.convergence = convergence
# if you have an SFTPUser, and you want something that provides ISFTPServer,
# then you get SFTPHandler(user)
components.registerAdapter(SFTPHandler, SFTPUser, ISFTPServer)
@@ -1234,4 +1281,3 @@ class SFTPServer(service.MultiService):
s = strports.service(sftp_portstr, f)
s.setServiceParent(self)


@@ -24,24 +24,25 @@ if have_pycrypto:
#from twisted.internet.base import DelayedCall
#DelayedCall.debug = True
-import sys, traceback
+import traceback
"""
+import sys
def trace_exceptions(frame, event, arg):
if event != 'exception':
return
co = frame.f_code
func_name = co.co_name
line_no = frame.f_lineno
filename = co.co_filename
exc_type, exc_value, exc_traceback = arg
print 'Tracing exception: %r %r on line %r of %r in %r' % \
(exc_type.__name__, exc_value, line_no, func_name, filename)
def trace_calls(frame, event, arg):
if event != 'call':
return
return trace_exceptions
sys.settrace(trace_calls)
"""
@@ -70,9 +71,9 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
def _done(res):
if isinstance(res, Failure):
res.trap(sftp.SFTPError)
-self.failUnlessEqual(res.value.code, expected_code,
+self.failUnlessReallyEqual(res.value.code, expected_code,
"%s was supposed to raise SFTPError(%d), not SFTPError(%d): %s" %
(which, expected_code, res.value.code, res))
else:
print '@' + '@'.join(s)
self.fail("%s was supposed to raise SFTPError(%d), not get '%s'" %
@@ -80,6 +81,10 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addBoth(_done)
return d
+def failUnlessReallyEqual(self, a, b, msg=None):
+self.failUnlessEqual(a, b, msg=msg)
+self.failUnlessEqual(type(a), type(b), msg=msg)
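failUnlessReallyEqual also compares types, which these tests rely on because str/unicode mixups are easy to miss in Python 2. For example (illustrative only, not from the patch):

    assert "small" == u"small"               # plain failUnlessEqual would pass
    assert type("small") != type(u"small")   # the extra type check catches the mismatch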
def _set_up(self, basedir, num_clients=1, num_servers=10):
self.basedir = "sftp/" + basedir
self.set_up_grid(num_clients=num_clients, num_servers=num_servers)
@@ -175,43 +180,43 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
version = self.handler.gotVersion(3, {})
self.failUnless(isinstance(version, dict))
-self.failUnlessEqual(self.handler._path_from_string(""), [])
+self.failUnlessReallyEqual(self.handler._path_from_string(""), [])
-self.failUnlessEqual(self.handler._path_from_string("/"), [])
+self.failUnlessReallyEqual(self.handler._path_from_string("/"), [])
-self.failUnlessEqual(self.handler._path_from_string("."), [])
+self.failUnlessReallyEqual(self.handler._path_from_string("."), [])
-self.failUnlessEqual(self.handler._path_from_string("//"), [])
+self.failUnlessReallyEqual(self.handler._path_from_string("//"), [])
-self.failUnlessEqual(self.handler._path_from_string("/."), [])
+self.failUnlessReallyEqual(self.handler._path_from_string("/."), [])
-self.failUnlessEqual(self.handler._path_from_string("/./"), [])
+self.failUnlessReallyEqual(self.handler._path_from_string("/./"), [])
-self.failUnlessEqual(self.handler._path_from_string("foo"), [u"foo"])
+self.failUnlessReallyEqual(self.handler._path_from_string("foo"), [u"foo"])
-self.failUnlessEqual(self.handler._path_from_string("/foo"), [u"foo"])
+self.failUnlessReallyEqual(self.handler._path_from_string("/foo"), [u"foo"])
-self.failUnlessEqual(self.handler._path_from_string("foo/"), [u"foo"])
+self.failUnlessReallyEqual(self.handler._path_from_string("foo/"), [u"foo"])
-self.failUnlessEqual(self.handler._path_from_string("/foo/"), [u"foo"])
+self.failUnlessReallyEqual(self.handler._path_from_string("/foo/"), [u"foo"])
-self.failUnlessEqual(self.handler._path_from_string("foo/bar"), [u"foo", u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("foo/bar"), [u"foo", u"bar"])
-self.failUnlessEqual(self.handler._path_from_string("/foo/bar"), [u"foo", u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("/foo/bar"), [u"foo", u"bar"])
-self.failUnlessEqual(self.handler._path_from_string("foo/bar//"), [u"foo", u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("foo/bar//"), [u"foo", u"bar"])
-self.failUnlessEqual(self.handler._path_from_string("/foo/bar//"), [u"foo", u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("/foo/bar//"), [u"foo", u"bar"])
-self.failUnlessEqual(self.handler._path_from_string("foo/../bar"), [u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("foo/../bar"), [u"bar"])
-self.failUnlessEqual(self.handler._path_from_string("/foo/../bar"), [u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("/foo/../bar"), [u"bar"])
-self.failUnlessEqual(self.handler._path_from_string("../bar"), [u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("../bar"), [u"bar"])
-self.failUnlessEqual(self.handler._path_from_string("/../bar"), [u"bar"])
+self.failUnlessReallyEqual(self.handler._path_from_string("/../bar"), [u"bar"])
-self.failUnlessEqual(self.handler.realPath(""), "/")
+self.failUnlessReallyEqual(self.handler.realPath(""), "/")
-self.failUnlessEqual(self.handler.realPath("/"), "/")
+self.failUnlessReallyEqual(self.handler.realPath("/"), "/")
-self.failUnlessEqual(self.handler.realPath("."), "/")
+self.failUnlessReallyEqual(self.handler.realPath("."), "/")
-self.failUnlessEqual(self.handler.realPath("//"), "/")
+self.failUnlessReallyEqual(self.handler.realPath("//"), "/")
-self.failUnlessEqual(self.handler.realPath("/."), "/")
+self.failUnlessReallyEqual(self.handler.realPath("/."), "/")
-self.failUnlessEqual(self.handler.realPath("/./"), "/")
+self.failUnlessReallyEqual(self.handler.realPath("/./"), "/")
-self.failUnlessEqual(self.handler.realPath("foo"), "/foo")
+self.failUnlessReallyEqual(self.handler.realPath("foo"), "/foo")
-self.failUnlessEqual(self.handler.realPath("/foo"), "/foo")
+self.failUnlessReallyEqual(self.handler.realPath("/foo"), "/foo")
-self.failUnlessEqual(self.handler.realPath("foo/"), "/foo")
+self.failUnlessReallyEqual(self.handler.realPath("foo/"), "/foo")
-self.failUnlessEqual(self.handler.realPath("/foo/"), "/foo")
+self.failUnlessReallyEqual(self.handler.realPath("/foo/"), "/foo")
-self.failUnlessEqual(self.handler.realPath("foo/bar"), "/foo/bar")
+self.failUnlessReallyEqual(self.handler.realPath("foo/bar"), "/foo/bar")
-self.failUnlessEqual(self.handler.realPath("/foo/bar"), "/foo/bar")
+self.failUnlessReallyEqual(self.handler.realPath("/foo/bar"), "/foo/bar")
-self.failUnlessEqual(self.handler.realPath("foo/bar//"), "/foo/bar")
+self.failUnlessReallyEqual(self.handler.realPath("foo/bar//"), "/foo/bar")
-self.failUnlessEqual(self.handler.realPath("/foo/bar//"), "/foo/bar")
+self.failUnlessReallyEqual(self.handler.realPath("/foo/bar//"), "/foo/bar")
-self.failUnlessEqual(self.handler.realPath("foo/../bar"), "/bar")
+self.failUnlessReallyEqual(self.handler.realPath("foo/../bar"), "/bar")
-self.failUnlessEqual(self.handler.realPath("/foo/../bar"), "/bar")
+self.failUnlessReallyEqual(self.handler.realPath("/foo/../bar"), "/bar")
-self.failUnlessEqual(self.handler.realPath("../bar"), "/bar")
+self.failUnlessReallyEqual(self.handler.realPath("../bar"), "/bar")
-self.failUnlessEqual(self.handler.realPath("/../bar"), "/bar")
+self.failUnlessReallyEqual(self.handler.realPath("/../bar"), "/bar")
d.addCallback(_check)
d.addCallback(lambda ign:
@@ -224,7 +229,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
return d
def test_raise_error(self):
-self.failUnlessEqual(sftpd._raise_error(None), None)
+self.failUnlessReallyEqual(sftpd._raise_error(None), None)
d = defer.succeed(None)
d.addCallback(lambda ign:
@@ -273,20 +278,20 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
def _compareDirLists(self, actual, expected):
actual_list = sorted(actual)
expected_list = sorted(expected)
-self.failUnlessEqual(len(actual_list), len(expected_list),
+self.failUnlessReallyEqual(len(actual_list), len(expected_list),
"%r is wrong length, expecting %r" % (actual_list, expected_list))
for (a, b) in zip(actual_list, expected_list):
(name, text, attrs) = a
(expected_name, expected_text_re, expected_attrs) = b
-self.failUnlessEqual(name, expected_name)
+self.failUnlessReallyEqual(name, expected_name)
self.failUnless(re.match(expected_text_re, text), "%r does not match %r" % (text, expected_text_re))
# it is ok for there to be extra actual attributes
# TODO: check times
for e in expected_attrs:
-self.failUnlessEqual(attrs[e], expected_attrs[e])
+self.failUnlessReallyEqual(attrs[e], expected_attrs[e])
def test_openDirectory_and_attrs(self):
-d = self._set_up("openDirectory")
+d = self._set_up("openDirectory_and_attrs")
d.addCallback(lambda ign: self._set_up_tree())
d.addCallback(lambda ign:
@@ -303,14 +308,14 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
self.handler.openDirectory, "nodir/nodir"))
gross = u"gro\u00DF".encode("utf-8")
expected_root = [
('empty_lit_dir', r'drwxrwx--- .* \? .* empty_lit_dir$', {'permissions': S_IFDIR | 0770}),
(gross, r'-rw-rw---- .* 1010 .* '+gross+'$', {'permissions': S_IFREG | 0660, 'size': 1010}),
('loop', r'drwxrwx--- .* \? .* loop$', {'permissions': S_IFDIR | 0770}),
('mutable', r'-rw-rw---- .* \? .* mutable$', {'permissions': S_IFREG | 0660}),
('readonly', r'-r--r----- .* \? .* readonly$', {'permissions': S_IFREG | 0440}),
('small', r'-rw-rw---- .* 10 .* small$', {'permissions': S_IFREG | 0660, 'size': 10}),
('small2', r'-rw-rw---- .* 26 .* small2$', {'permissions': S_IFREG | 0660, 'size': 26}),
('tiny_lit_dir', r'drwxrwx--- .* \? .* tiny_lit_dir$', {'permissions': S_IFDIR | 0770}),
('unknown', r'\?--------- .* \? .* unknown$', {'permissions': 0}),
]
@@ -336,12 +341,12 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(lambda ign: self.handler.getAttrs("small", True))
def _check_attrs(attrs):
-self.failUnlessEqual(attrs['permissions'], S_IFREG | 0440) #FIXME
+self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0440) #FIXME
-self.failUnlessEqual(attrs['size'], 10)
+self.failUnlessReallyEqual(attrs['size'], 10)
d.addCallback(_check_attrs)
d.addCallback(lambda ign:
-self.failUnlessEqual(self.handler.setAttrs("small", {}), None))
+self.failUnlessReallyEqual(self.handler.setAttrs("small", {}), None))
d.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "setAttrs size",
@@ -350,7 +355,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
return d
def test_openFile_read(self):
-d = self._set_up("openFile")
+d = self._set_up("openFile_read")
d.addCallback(lambda ign: self._set_up_tree())
d.addCallback(lambda ign:
@ -377,18 +382,25 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(lambda ign: d.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir uri READ denied", self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir uri READ denied",
self.handler.openFile, "uri/"+self.tiny_lit_dir_uri, sftp.FXF_READ, {})) self.handler.openFile, "uri/"+self.tiny_lit_dir_uri, sftp.FXF_READ, {}))
# FIXME: should be FX_NO_SUCH_FILE?
d.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile noexist uri READ denied",
self.handler.openFile, "uri/URI:noexist", sftp.FXF_READ, {}))
d.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile invalid UTF-8 uri READ denied",
self.handler.openFile, "uri/URI:\xFF", sftp.FXF_READ, {}))
# reading an existing file should succeed # reading an existing file should succeed
d.addCallback(lambda ign: self.handler.openFile("small", sftp.FXF_READ, {})) d.addCallback(lambda ign: self.handler.openFile("small", sftp.FXF_READ, {}))
def _read_small(rf): def _read_small(rf):
d2 = rf.readChunk(0, 10) d2 = rf.readChunk(0, 10)
d2.addCallback(lambda data: self.failUnlessEqual(data, "0123456789")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789"))
d2.addCallback(lambda ign: rf.readChunk(2, 6)) d2.addCallback(lambda ign: rf.readChunk(2, 6))
d2.addCallback(lambda data: self.failUnlessEqual(data, "234567")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "234567"))
d2.addCallback(lambda ign: rf.readChunk(8, 4)) # read that starts before EOF is OK d2.addCallback(lambda ign: rf.readChunk(8, 4)) # read that starts before EOF is OK
d2.addCallback(lambda data: self.failUnlessEqual(data, "89")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "89"))
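# A read that starts before EOF may return fewer bytes than requested (2 of the 4 asked for
# above); a read that starts at or beyond EOF fails with FX_EOF.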
d2.addCallback(lambda ign: d2.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)",
@@ -402,8 +414,8 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d2.addCallback(lambda ign: rf.getAttrs()) d2.addCallback(lambda ign: rf.getAttrs())
def _check_attrs(attrs): def _check_attrs(attrs):
self.failUnlessEqual(attrs['permissions'], S_IFREG | 0440) #FIXME self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0440) #FIXME
self.failUnlessEqual(attrs['size'], 10) self.failUnlessReallyEqual(attrs['size'], 10)
d2.addCallback(_check_attrs) d2.addCallback(_check_attrs)
d2.addCallback(lambda ign: d2.addCallback(lambda ign:
@@ -431,13 +443,13 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(lambda ign: self.handler.openFile(gross, sftp.FXF_READ, {})) d.addCallback(lambda ign: self.handler.openFile(gross, sftp.FXF_READ, {}))
def _read_gross(rf): def _read_gross(rf):
d2 = rf.readChunk(0, 10) d2 = rf.readChunk(0, 10)
d2.addCallback(lambda data: self.failUnlessEqual(data, "0123456789")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789"))
d2.addCallback(lambda ign: rf.readChunk(2, 6)) d2.addCallback(lambda ign: rf.readChunk(2, 6))
d2.addCallback(lambda data: self.failUnlessEqual(data, "234567")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "234567"))
d2.addCallback(lambda ign: rf.readChunk(1008, 4)) # read that starts before EOF is OK d2.addCallback(lambda ign: rf.readChunk(1008, 4)) # read that starts before EOF is OK
d2.addCallback(lambda data: self.failUnlessEqual(data, "89")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "89"))
d2.addCallback(lambda ign: d2.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)",
@@ -451,8 +463,8 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d2.addCallback(lambda ign: rf.getAttrs()) d2.addCallback(lambda ign: rf.getAttrs())
def _check_attrs(attrs): def _check_attrs(attrs):
self.failUnlessEqual(attrs['permissions'], S_IFREG | 0440) #FIXME self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0440) #FIXME
self.failUnlessEqual(attrs['size'], 1010) self.failUnlessReallyEqual(attrs['size'], 1010)
d2.addCallback(_check_attrs) d2.addCallback(_check_attrs)
d2.addCallback(lambda ign: d2.addCallback(lambda ign:
@@ -479,7 +491,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(lambda ign: self.handler.openFile("uri/"+self.small_uri, sftp.FXF_READ, {})) d.addCallback(lambda ign: self.handler.openFile("uri/"+self.small_uri, sftp.FXF_READ, {}))
def _read_small_uri(rf): def _read_small_uri(rf):
d2 = rf.readChunk(0, 10) d2 = rf.readChunk(0, 10)
d2.addCallback(lambda data: self.failUnlessEqual(data, "0123456789")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789"))
d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: rf.close())
return d2 return d2
d.addCallback(_read_small_uri) d.addCallback(_read_small_uri)
@@ -488,7 +500,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(lambda ign: self.handler.openFile("uri/"+self.gross_uri, sftp.FXF_READ, {})) d.addCallback(lambda ign: self.handler.openFile("uri/"+self.gross_uri, sftp.FXF_READ, {}))
def _read_gross_uri(rf): def _read_gross_uri(rf):
d2 = rf.readChunk(0, 10) d2 = rf.readChunk(0, 10)
d2.addCallback(lambda data: self.failUnlessEqual(data, "0123456789")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789"))
d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: rf.close())
return d2 return d2
d.addCallback(_read_gross_uri) d.addCallback(_read_gross_uri)
@@ -497,20 +509,29 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(lambda ign: self.handler.openFile("uri/"+self.mutable_uri, sftp.FXF_READ, {})) d.addCallback(lambda ign: self.handler.openFile("uri/"+self.mutable_uri, sftp.FXF_READ, {}))
def _read_mutable_uri(rf): def _read_mutable_uri(rf):
d2 = rf.readChunk(0, 100) d2 = rf.readChunk(0, 100)
d2.addCallback(lambda data: self.failUnlessEqual(data, "mutable file contents")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable file contents"))
d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: rf.close())
return d2 return d2
d.addCallback(_read_mutable_uri) d.addCallback(_read_mutable_uri)
# repeat for a file within a directory referenced by URI
d.addCallback(lambda ign: self.handler.openFile("uri/"+self.tiny_lit_dir_uri+"/short", sftp.FXF_READ, {}))
def _read_short(rf):
d2 = rf.readChunk(0, 100)
d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "The end."))
d2.addCallback(lambda ign: rf.close())
return d2
d.addCallback(_read_short)
return d return d
def test_openFile_write(self): def test_openFile_write(self):
d = self._set_up("openFile") d = self._set_up("openFile_write")
d.addCallback(lambda ign: self._set_up_tree()) d.addCallback(lambda ign: self._set_up_tree())
d.addCallback(lambda ign: d.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile '' WRITE|CREAT|TRUNC", self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile '' WRITE|CREAT|TRUNC",
self.handler.openFile, "", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) self.handler.openFile, "", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {}))
d.addCallback(lambda ign: d.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "openFile newfile WRITE|TRUNC", self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "openFile newfile WRITE|TRUNC",
self.handler.openFile, "newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) self.handler.openFile, "newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {}))
@@ -560,15 +581,15 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {}))
def _write(wf): def _write(wf):
d2 = wf.writeChunk(0, "0123456789") d2 = wf.writeChunk(0, "0123456789")
d2.addCallback(lambda res: self.failUnlessEqual(res, None)) d2.addCallback(lambda res: self.failUnlessReallyEqual(res, None))
d2.addCallback(lambda ign: wf.writeChunk(8, "0123")) d2.addCallback(lambda ign: wf.writeChunk(8, "0123"))
d2.addCallback(lambda ign: wf.writeChunk(13, "abc")) d2.addCallback(lambda ign: wf.writeChunk(13, "abc"))
d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda ign: wf.getAttrs())
def _check_attrs(attrs): def _check_attrs(attrs):
self.failUnlessEqual(attrs['permissions'], S_IFREG | 0440) #FIXME self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0440) #FIXME
self.failUnlessEqual(attrs['size'], 16) self.failUnlessReallyEqual(attrs['size'], 16)
d2.addCallback(_check_attrs) d2.addCallback(_check_attrs)
d2.addCallback(lambda ign: wf.setAttrs({})) d2.addCallback(lambda ign: wf.setAttrs({}))
@@ -579,7 +600,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d2.addCallback(lambda ign: wf.setAttrs({'size': 14})) d2.addCallback(lambda ign: wf.setAttrs({'size': 14}))
d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda ign: wf.getAttrs())
d2.addCallback(lambda attrs: self.failUnlessEqual(attrs['size'], 14)) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['size'], 14))
d2.addCallback(lambda ign: d2.addCallback(lambda ign:
self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "readChunk on write-only handle denied", self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "readChunk on write-only handle denied",
@@ -599,7 +620,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(_write) d.addCallback(_write)
d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda ign: self.root.get(u"newfile"))
d.addCallback(lambda node: download_to_data(node)) d.addCallback(lambda node: download_to_data(node))
d.addCallback(lambda data: self.failUnlessEqual(data, "012345670123\x00a")) d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123\x00a"))
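# Expected contents: the write at 0 gives "0123456789", the write at 8 overlays "0123" to give
# "012345670123", the write at 13 leaves a zero byte at offset 12, and setAttrs({'size': 14})
# truncates "012345670123\x00abc" to 14 bytes.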
# test APPEND flag, and also replacing an existing file ("newfile") # test APPEND flag, and also replacing an existing file ("newfile")
d.addCallback(lambda ign: d.addCallback(lambda ign:
@@ -613,7 +634,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(_write_append) d.addCallback(_write_append)
d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda ign: self.root.get(u"newfile"))
d.addCallback(lambda node: download_to_data(node)) d.addCallback(lambda node: download_to_data(node))
d.addCallback(lambda data: self.failUnlessEqual(data, "01234567890123")) d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234567890123"))
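# With FXF_APPEND the offset passed to writeChunk is ignored and each write lands at the
# current end of file, so appended data simply concatenates.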
# test EXCL flag # test EXCL flag
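# (CREAT | EXCL requires that the file not already exist; on success it is created empty,
# which the download check below verifies before anything is written to it.)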
d.addCallback(lambda ign: d.addCallback(lambda ign:
@@ -622,7 +643,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
def _write_excl(wf): def _write_excl(wf):
d2 = self.root.get(u"excl") d2 = self.root.get(u"excl")
d2.addCallback(lambda node: download_to_data(node)) d2.addCallback(lambda node: download_to_data(node))
d2.addCallback(lambda data: self.failUnlessEqual(data, "")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, ""))
d2.addCallback(lambda ign: wf.writeChunk(0, "0123456789")) d2.addCallback(lambda ign: wf.writeChunk(0, "0123456789"))
d2.addCallback(lambda ign: wf.close()) d2.addCallback(lambda ign: wf.close())
@@ -630,7 +651,31 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(_write_excl) d.addCallback(_write_excl)
d.addCallback(lambda ign: self.root.get(u"excl")) d.addCallback(lambda ign: self.root.get(u"excl"))
d.addCallback(lambda node: download_to_data(node)) d.addCallback(lambda node: download_to_data(node))
d.addCallback(lambda data: self.failUnlessEqual(data, "0123456789")) d.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789"))
"""
# test that writing a zero-length file with EXCL only updates the directory once
d.addCallback(lambda ign:
self.handler.openFile("zerolength", sftp.FXF_WRITE | sftp.FXF_CREAT |
sftp.FXF_EXCL, {}))
def _write_excl_zerolength(wf):
d2 = self.root.get(u"zerolength")
d2.addCallback(lambda node: download_to_data(node))
d2.addCallback(lambda data: self.failUnlessReallyEqual(data, ""))
# FIXME: no API to get the best version number exists (fix as part of #993)
#stash = {}
#d2.addCallback(lambda ign: self.root.get_best_version_number())
#d2.addCallback(lambda version: stash.update(version=version))
d2.addCallback(lambda ign: wf.close())
#d2.addCallback(lambda ign: self.root.get_best_version_number())
#d2.addCallback(lambda new_version: self.failUnlessReallyEqual(new_version, stash['version']))
return d2
d.addCallback(_write_excl_zerolength)
d.addCallback(lambda ign: self.root.get(u"zerolength"))
d.addCallback(lambda node: download_to_data(node))
d.addCallback(lambda data: self.failUnlessReallyEqual(data, ""))
"""
# test WRITE | CREAT without TRUNC # test WRITE | CREAT without TRUNC
d.addCallback(lambda ign: d.addCallback(lambda ign:
@@ -642,7 +687,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(_write_notrunc) d.addCallback(_write_notrunc)
d.addCallback(lambda ign: self.root.get(u"newfile2")) d.addCallback(lambda ign: self.root.get(u"newfile2"))
d.addCallback(lambda node: download_to_data(node)) d.addCallback(lambda node: download_to_data(node))
d.addCallback(lambda data: self.failUnlessEqual(data, "0123456789")) d.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789"))
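# Without TRUNC the existing contents (if any) must be preserved; "newfile2" did not exist,
# so it ends up holding exactly the ten bytes checked above.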
# test writing to a mutable file # test writing to a mutable file
d.addCallback(lambda ign: d.addCallback(lambda ign:
@@ -655,10 +700,10 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
d.addCallback(lambda ign: self.root.get(u"mutable")) d.addCallback(lambda ign: self.root.get(u"mutable"))
def _check_same_file(node): def _check_same_file(node):
self.failUnless(node.is_mutable()) self.failUnless(node.is_mutable())
self.failUnlessEqual(node.get_uri(), self.mutable_uri) self.failUnlessReallyEqual(node.get_uri(), self.mutable_uri)
return node.download_best_version() return node.download_best_version()
d.addCallback(_check_same_file) d.addCallback(_check_same_file)
d.addCallback(lambda data: self.failUnlessEqual(data, "mutable new! contents")) d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable new! contents"))
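# The mutable file is modified in place: the child link still refers to the same object, so
# its URI is unchanged while its contents are replaced.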
""" """
# test READ | WRITE without CREAT or TRUNC # test READ | WRITE without CREAT or TRUNC
@@ -667,13 +712,13 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
def _read_write(rwf): def _read_write(rwf):
d2 = rwf.writeChunk(8, "0123") d2 = rwf.writeChunk(8, "0123")
d2.addCallback(lambda ign: rwf.readChunk(0, 100)) d2.addCallback(lambda ign: rwf.readChunk(0, 100))
d2.addCallback(lambda data: self.failUnlessEqual(data, "012345670123")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123"))
d2.addCallback(lambda ign: rwf.close()) d2.addCallback(lambda ign: rwf.close())
return d2 return d2
d.addCallback(_read_write) d.addCallback(_read_write)
d.addCallback(lambda ign: self.root.get(u"small")) d.addCallback(lambda ign: self.root.get(u"small"))
d.addCallback(lambda node: download_to_data(node)) d.addCallback(lambda node: download_to_data(node))
d.addCallback(lambda data: self.failUnlessEqual(data, "012345670123")) d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123"))
""" """
return d return d
@@ -721,7 +766,7 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
self.shouldFail(NoSuchChildError, "removeFile small", "small", self.shouldFail(NoSuchChildError, "removeFile small", "small",
self.root.get, u"small")) self.root.get, u"small"))
d2.addCallback(lambda ign: rf.readChunk(0, 10)) d2.addCallback(lambda ign: rf.readChunk(0, 10))
d2.addCallback(lambda data: self.failUnlessEqual(data, "0123456789")) d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789"))
d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: rf.close())
return d2 return d2
d.addCallback(_remove_and_read_small) d.addCallback(_remove_and_read_small)
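# i.e. a handle that was already open for reading remains usable after the file has been
# unlinked from its directory, much like POSIX unlink semantics.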
@@ -807,23 +852,23 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
# renaming a file to a correct path should succeed # renaming a file to a correct path should succeed
d.addCallback(lambda ign: self.handler.renameFile("small", "new_small")) d.addCallback(lambda ign: self.handler.renameFile("small", "new_small"))
d.addCallback(lambda ign: self.root.get(u"new_small")) d.addCallback(lambda ign: self.root.get(u"new_small"))
d.addCallback(lambda node: self.failUnlessEqual(node.get_uri(), self.small_uri)) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri))
# renaming a file into a subdirectory should succeed (also tests Unicode names) # renaming a file into a subdirectory should succeed (also tests Unicode names)
d.addCallback(lambda ign: self.handler.renameFile(u"gro\u00DF".encode('utf-8'), d.addCallback(lambda ign: self.handler.renameFile(u"gro\u00DF".encode('utf-8'),
u"loop/neue_gro\u00DF".encode('utf-8'))) u"loop/neue_gro\u00DF".encode('utf-8')))
d.addCallback(lambda ign: self.root.get(u"neue_gro\u00DF")) d.addCallback(lambda ign: self.root.get(u"neue_gro\u00DF"))
d.addCallback(lambda node: self.failUnlessEqual(node.get_uri(), self.gross_uri)) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.gross_uri))
# renaming a directory to a correct path should succeed # renaming a directory to a correct path should succeed
d.addCallback(lambda ign: self.handler.renameFile("tiny_lit_dir", "new_tiny_lit_dir")) d.addCallback(lambda ign: self.handler.renameFile("tiny_lit_dir", "new_tiny_lit_dir"))
d.addCallback(lambda ign: self.root.get(u"new_tiny_lit_dir")) d.addCallback(lambda ign: self.root.get(u"new_tiny_lit_dir"))
d.addCallback(lambda node: self.failUnlessEqual(node.get_uri(), self.tiny_lit_dir_uri)) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.tiny_lit_dir_uri))
# renaming an unknown to a correct path should succeed # renaming an unknown to a correct path should succeed
d.addCallback(lambda ign: self.handler.renameFile("unknown", "new_unknown")) d.addCallback(lambda ign: self.handler.renameFile("unknown", "new_unknown"))
d.addCallback(lambda ign: self.root.get(u"new_unknown")) d.addCallback(lambda ign: self.root.get(u"new_unknown"))
d.addCallback(lambda node: self.failUnlessEqual(node.get_uri(), self.unknown_uri)) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.unknown_uri))
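# Each rename is verified by checking that the child reachable under the new name has the
# same URI as before, i.e. the entry is re-linked rather than copied.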
return d return d
@@ -840,9 +885,9 @@ class Handler(GridTestMixin, ShouldFailMixin, unittest.TestCase):
self.failUnless(child.is_mutable()) self.failUnless(child.is_mutable())
# FIXME # FIXME
#self.failUnless('ctime' in metadata, metadata) #self.failUnless('ctime' in metadata, metadata)
#self.failUnlessEqual(metadata['ctime'], 42) #self.failUnlessReallyEqual(metadata['ctime'], 42)
#self.failUnless('ext_foo' in metadata, metadata) #self.failUnless('ext_foo' in metadata, metadata)
#self.failUnlessEqual(metadata['ext_foo'], 'bar') #self.failUnlessReallyEqual(metadata['ext_foo'], 'bar')
# TODO: child should be empty # TODO: child should be empty
d.addCallback(_got) d.addCallback(_got)