test_cli.py: split the largest test classes out into separate files

That file was getting unruly. No behavioral changes, apart from tests
now having different names.
Brian Warner 2015-02-22 22:31:14 -08:00
parent 5a37ff0d0e
commit be4b13c318
8 changed files with 2693 additions and 2616 deletions
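
Each of the new files shown below follows the same pattern: it imports the shared plumbing (CLITestMixin and, for the backup tests, parse_options) back from test_cli, and re-declares its test class unchanged under the new module. A minimal sketch of that layout, with the module name purely illustrative since the file names are not visible in this view:

    # hypothetical extracted module living alongside test_cli.py (name illustrative)
    from twisted.trial import unittest
    from .no_network import GridTestMixin
    from .test_cli import CLITestMixin   # shared helpers stay behind in test_cli.py

    class Backup(GridTestMixin, CLITestMixin, unittest.TestCase):
        # test methods are moved over verbatim; only their module (and thus
        # their fully-qualified test name) changes
        pass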

File diff suppressed because it is too large


@@ -0,0 +1,475 @@
import os.path
from twisted.trial import unittest
from cStringIO import StringIO
import re
from mock import patch
from allmydata.util import fileutil
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.encodingutil import get_io_encoding, unicode_to_argv
from allmydata.scripts import cli, backupdb
from .common_util import StallMixin
from .no_network import GridTestMixin
from .test_cli import CLITestMixin, parse_options
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
def writeto(self, path, data):
full_path = os.path.join(self.basedir, "home", path)
fileutil.make_dirs(os.path.dirname(full_path))
fileutil.write(full_path, data)
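# parse the "<N> files uploaded (<M> reused), ... directories skipped" summary
# line that 'tahoe backup' prints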
def count_output(self, out):
mo = re.search(r"(\d)+ files uploaded \((\d+) reused\), "
"(\d)+ files skipped, "
"(\d+) directories created \((\d+) reused\), "
"(\d+) directories skipped", out)
return [int(s) for s in mo.groups()]
def count_output2(self, out):
mo = re.search(r"(\d)+ files checked, (\d+) directories checked", out)
return [int(s) for s in mo.groups()]
def test_backup(self):
self.basedir = "cli/Backup/backup"
self.set_up_grid()
# is the backupdb available? If so, we test that a second backup does
# not create new directories.
hush = StringIO()
bdb = backupdb.get_backupdb(os.path.join(self.basedir, "dbtest"),
hush)
self.failUnless(bdb)
# create a small local directory with a couple of files
source = os.path.join(self.basedir, "home")
fileutil.make_dirs(os.path.join(source, "empty"))
self.writeto("parent/subdir/foo.txt", "foo")
self.writeto("parent/subdir/bar.txt", "bar\n" * 1000)
self.writeto("parent/blah.txt", "blah")
def do_backup(verbose=False):
cmd = ["backup"]
if verbose:
cmd.append("--verbose")
cmd.append(source)
cmd.append("tahoe:backups")
return self.do_cli(*cmd)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res: do_backup())
def _check0((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
fu, fr, fs, dc, dr, ds = self.count_output(out)
# foo.txt, bar.txt, blah.txt
self.failUnlessReallyEqual(fu, 3)
self.failUnlessReallyEqual(fr, 0)
self.failUnlessReallyEqual(fs, 0)
# empty, home, home/parent, home/parent/subdir
self.failUnlessReallyEqual(dc, 4)
self.failUnlessReallyEqual(dr, 0)
self.failUnlessReallyEqual(ds, 0)
d.addCallback(_check0)
d.addCallback(lambda res: self.do_cli("ls", "--uri", "tahoe:backups"))
def _check1((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.split("\n")
children = dict([line.split() for line in lines if line])
latest_uri = children["Latest"]
self.failUnless(latest_uri.startswith("URI:DIR2-CHK:"), latest_uri)
childnames = children.keys()
self.failUnlessReallyEqual(sorted(childnames), ["Archives", "Latest"])
d.addCallback(_check1)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
def _check2((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(sorted(out.split()), ["empty", "parent"])
d.addCallback(_check2)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
def _check2a((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out.strip(), "")
d.addCallback(_check2a)
d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
def _check3((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out, "foo")
d.addCallback(_check3)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
def _check4((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.old_archives = out.split()
self.failUnlessReallyEqual(len(self.old_archives), 1)
d.addCallback(_check4)
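# Archives/ entries are timestamp-named with one-second granularity, so stall
# a bit more than a second to guarantee the next backup gets a distinct name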
d.addCallback(self.stall, 1.1)
d.addCallback(lambda res: do_backup())
def _check4a((rc, out, err)):
# second backup should reuse everything, if the backupdb is
# available
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
fu, fr, fs, dc, dr, ds = self.count_output(out)
# foo.txt, bar.txt, blah.txt
self.failUnlessReallyEqual(fu, 0)
self.failUnlessReallyEqual(fr, 3)
self.failUnlessReallyEqual(fs, 0)
# empty, home, home/parent, home/parent/subdir
self.failUnlessReallyEqual(dc, 0)
self.failUnlessReallyEqual(dr, 4)
self.failUnlessReallyEqual(ds, 0)
d.addCallback(_check4a)
# sneak into the backupdb, crank back the "last checked"
# timestamp to force a check on all files
def _reset_last_checked(res):
dbfile = os.path.join(self.get_clientdir(),
"private", "backupdb.sqlite")
self.failUnless(os.path.exists(dbfile), dbfile)
bdb = backupdb.get_backupdb(dbfile)
bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
bdb.cursor.execute("UPDATE directories SET last_checked=0")
bdb.connection.commit()
d.addCallback(_reset_last_checked)
d.addCallback(self.stall, 1.1)
d.addCallback(lambda res: do_backup(verbose=True))
def _check4b((rc, out, err)):
# we should check all files, and re-use all of them. None of
# the directories should have been changed, so we should
# re-use all of them too.
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
fu, fr, fs, dc, dr, ds = self.count_output(out)
fchecked, dchecked = self.count_output2(out)
self.failUnlessReallyEqual(fchecked, 3)
self.failUnlessReallyEqual(fu, 0)
self.failUnlessReallyEqual(fr, 3)
self.failUnlessReallyEqual(fs, 0)
self.failUnlessReallyEqual(dchecked, 4)
self.failUnlessReallyEqual(dc, 0)
self.failUnlessReallyEqual(dr, 4)
self.failUnlessReallyEqual(ds, 0)
d.addCallback(_check4b)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
def _check5((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.new_archives = out.split()
self.failUnlessReallyEqual(len(self.new_archives), 3, out)
# the original backup should still be the oldest (i.e. sorts
# alphabetically towards the beginning)
self.failUnlessReallyEqual(sorted(self.new_archives)[0],
self.old_archives[0])
d.addCallback(_check5)
d.addCallback(self.stall, 1.1)
def _modify(res):
self.writeto("parent/subdir/foo.txt", "FOOF!")
# and turn a file into a directory
os.unlink(os.path.join(source, "parent/blah.txt"))
os.mkdir(os.path.join(source, "parent/blah.txt"))
self.writeto("parent/blah.txt/surprise file", "surprise")
self.writeto("parent/blah.txt/surprisedir/subfile", "surprise")
# turn a directory into a file
os.rmdir(os.path.join(source, "empty"))
self.writeto("empty", "imagine nothing being here")
return do_backup()
d.addCallback(_modify)
def _check5a((rc, out, err)):
# second backup should reuse bar.txt (if backupdb is available),
# and upload the rest. None of the directories can be reused.
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
fu, fr, fs, dc, dr, ds = self.count_output(out)
# new foo.txt, surprise file, subfile, empty
self.failUnlessReallyEqual(fu, 4)
# old bar.txt
self.failUnlessReallyEqual(fr, 1)
self.failUnlessReallyEqual(fs, 0)
# home, parent, subdir, blah.txt, surprisedir
self.failUnlessReallyEqual(dc, 5)
self.failUnlessReallyEqual(dr, 0)
self.failUnlessReallyEqual(ds, 0)
d.addCallback(_check5a)
d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
def _check6((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.new_archives = out.split()
self.failUnlessReallyEqual(len(self.new_archives), 4)
self.failUnlessReallyEqual(sorted(self.new_archives)[0],
self.old_archives[0])
d.addCallback(_check6)
d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
def _check7((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out, "FOOF!")
# the old snapshot should not be modified
return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
d.addCallback(_check7)
def _check8((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out, "foo")
d.addCallback(_check8)
return d
# on our old dapper buildslave, this test takes a long time (usually
# 130s), so we have to bump up the default 120s timeout. The create-alias
# and initial backup alone take 60s, probably because of the handful of
# dirnodes being created (RSA key generation). The backup between check4
# and check4a takes 6s, as does the backup before check4b.
test_backup.timeout = 3000
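# helper for the exclude-option tests below: 'filtered' is what the option
# logic actually kept, while 'included'/'excluded' are the expected
# kept/dropped partitions of 'all'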
def _check_filtering(self, filtered, all, included, excluded):
filtered = set(filtered)
all = set(all)
included = set(included)
excluded = set(excluded)
self.failUnlessReallyEqual(filtered, included)
self.failUnlessReallyEqual(all.difference(filtered), excluded)
def test_exclude_options(self):
root_listdir = (u'lib.a', u'_darcs', u'subdir', u'nice_doc.lyx')
subdir_listdir = (u'another_doc.lyx', u'run_snake_run.py', u'CVS', u'.svn', u'_darcs')
basedir = "cli/Backup/exclude_options"
fileutil.make_dirs(basedir)
nodeurl_path = os.path.join(basedir, 'node.url')
fileutil.write(nodeurl_path, 'http://example.net:2357/')
def parse(args): return parse_options(basedir, "backup", args)
# test simple exclude
backup_options = parse(['--exclude', '*lyx', 'from', 'to'])
filtered = list(backup_options.filter_listdir(root_listdir))
self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
(u'nice_doc.lyx',))
# multiple exclude
backup_options = parse(['--exclude', '*lyx', '--exclude', 'lib.?', 'from', 'to'])
filtered = list(backup_options.filter_listdir(root_listdir))
self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
(u'nice_doc.lyx', u'lib.a'))
# vcs metadata exclusion
backup_options = parse(['--exclude-vcs', 'from', 'to'])
filtered = list(backup_options.filter_listdir(subdir_listdir))
self._check_filtering(filtered, subdir_listdir, (u'another_doc.lyx', u'run_snake_run.py',),
(u'CVS', u'.svn', u'_darcs'))
# read exclude patterns from file
exclusion_string = "_darcs\n*py\n.svn"
excl_filepath = os.path.join(basedir, 'exclusion')
fileutil.write(excl_filepath, exclusion_string)
backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
filtered = list(backup_options.filter_listdir(subdir_listdir))
self._check_filtering(filtered, subdir_listdir, (u'another_doc.lyx', u'CVS'),
(u'.svn', u'_darcs', u'run_snake_run.py'))
# test BackupConfigurationError
self.failUnlessRaises(cli.BackupConfigurationError,
parse,
['--exclude-from', excl_filepath + '.no', 'from', 'to'])
# test that an iterator works too
backup_options = parse(['--exclude', '*lyx', 'from', 'to'])
filtered = list(backup_options.filter_listdir(iter(root_listdir)))
self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
(u'nice_doc.lyx',))
def test_exclude_options_unicode(self):
nice_doc = u"nice_d\u00F8c.lyx"
try:
doc_pattern_arg = u"*d\u00F8c*".encode(get_io_encoding())
except UnicodeEncodeError:
raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
root_listdir = (u'lib.a', u'_darcs', u'subdir', nice_doc)
basedir = "cli/Backup/exclude_options_unicode"
fileutil.make_dirs(basedir)
nodeurl_path = os.path.join(basedir, 'node.url')
fileutil.write(nodeurl_path, 'http://example.net:2357/')
def parse(args): return parse_options(basedir, "backup", args)
# test simple exclude
backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
filtered = list(backup_options.filter_listdir(root_listdir))
self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
(nice_doc,))
# multiple exclude
backup_options = parse(['--exclude', doc_pattern_arg, '--exclude', 'lib.?', 'from', 'to'])
filtered = list(backup_options.filter_listdir(root_listdir))
self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
(nice_doc, u'lib.a'))
# read exclude patterns from file
exclusion_string = doc_pattern_arg + "\nlib.?"
excl_filepath = os.path.join(basedir, 'exclusion')
fileutil.write(excl_filepath, exclusion_string)
backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
filtered = list(backup_options.filter_listdir(root_listdir))
self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
(nice_doc, u'lib.a'))
# test that an iterator works too
backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
filtered = list(backup_options.filter_listdir(iter(root_listdir)))
self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
(nice_doc,))
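# __builtin__.file is patched below so the (nonexistent) expanded path is never
# really opened; we only inspect mock.call_args_list to see what path was requested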
@patch('__builtin__.file')
def test_exclude_from_tilde_expansion(self, mock):
basedir = "cli/Backup/exclude_from_tilde_expansion"
fileutil.make_dirs(basedir)
nodeurl_path = os.path.join(basedir, 'node.url')
fileutil.write(nodeurl_path, 'http://example.net:2357/')
def parse(args): return parse_options(basedir, "backup", args)
# ensure that tilde expansion is performed on exclude-from argument
exclude_file = u'~/.tahoe/excludes.dummy'
mock.return_value = StringIO()
parse(['--exclude-from', unicode_to_argv(exclude_file), 'from', 'to'])
self.failUnlessIn(((abspath_expanduser_unicode(exclude_file),), {}), mock.call_args_list)
def test_ignore_symlinks(self):
if not hasattr(os, 'symlink'):
raise unittest.SkipTest("Symlinks are not supported by Python on this platform.")
self.basedir = os.path.dirname(self.mktemp())
self.set_up_grid()
source = os.path.join(self.basedir, "home")
self.writeto("foo.txt", "foo")
os.symlink(os.path.join(source, "foo.txt"), os.path.join(source, "foo2.txt"))
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res: self.do_cli("backup", "--verbose", source, "tahoe:test"))
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 2)
foo2 = os.path.join(source, "foo2.txt")
self.failUnlessIn("WARNING: cannot backup symlink ", err)
self.failUnlessIn(foo2, err)
fu, fr, fs, dc, dr, ds = self.count_output(out)
# foo.txt
self.failUnlessReallyEqual(fu, 1)
self.failUnlessReallyEqual(fr, 0)
# foo2.txt
self.failUnlessReallyEqual(fs, 1)
# home
self.failUnlessReallyEqual(dc, 1)
self.failUnlessReallyEqual(dr, 0)
self.failUnlessReallyEqual(ds, 0)
d.addCallback(_check)
return d
def test_ignore_unreadable_file(self):
self.basedir = os.path.dirname(self.mktemp())
self.set_up_grid()
source = os.path.join(self.basedir, "home")
self.writeto("foo.txt", "foo")
os.chmod(os.path.join(source, "foo.txt"), 0000)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test"))
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 2)
self.failUnlessReallyEqual(err, "WARNING: permission denied on file %s\n" % os.path.join(source, "foo.txt"))
fu, fr, fs, dc, dr, ds = self.count_output(out)
self.failUnlessReallyEqual(fu, 0)
self.failUnlessReallyEqual(fr, 0)
# foo.txt
self.failUnlessReallyEqual(fs, 1)
# home
self.failUnlessReallyEqual(dc, 1)
self.failUnlessReallyEqual(dr, 0)
self.failUnlessReallyEqual(ds, 0)
d.addCallback(_check)
# restore permissions afterwards so trial can clean up the temporary files
def _cleanup(self):
os.chmod(os.path.join(source, "foo.txt"), 0644)
d.addCallback(_cleanup)
d.addErrback(_cleanup)
return d
def test_ignore_unreadable_directory(self):
self.basedir = os.path.dirname(self.mktemp())
self.set_up_grid()
source = os.path.join(self.basedir, "home")
os.mkdir(source)
os.mkdir(os.path.join(source, "test"))
os.chmod(os.path.join(source, "test"), 0000)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test"))
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 2)
self.failUnlessReallyEqual(err, "WARNING: permission denied on directory %s\n" % os.path.join(source, "test"))
fu, fr, fs, dc, dr, ds = self.count_output(out)
self.failUnlessReallyEqual(fu, 0)
self.failUnlessReallyEqual(fr, 0)
self.failUnlessReallyEqual(fs, 0)
# home, test
self.failUnlessReallyEqual(dc, 2)
self.failUnlessReallyEqual(dr, 0)
# test
self.failUnlessReallyEqual(ds, 1)
d.addCallback(_check)
# restore permissions afterwards so trial can clean up the temporary files
def _cleanup(self):
os.chmod(os.path.join(source, "test"), 0655)
d.addCallback(_cleanup)
d.addErrback(_cleanup)
return d
def test_backup_without_alias(self):
# 'tahoe backup' should output a sensible error message when invoked
# without an alias instead of a stack trace.
self.basedir = os.path.dirname(self.mktemp())
self.set_up_grid()
source = os.path.join(self.basedir, "file1")
d = self.do_cli('backup', source, source)
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
return d
def test_backup_with_nonexistent_alias(self):
# 'tahoe backup' should output a sensible error message when invoked
# with a nonexistent alias.
self.basedir = os.path.dirname(self.mktemp())
self.set_up_grid()
source = os.path.join(self.basedir, "file1")
d = self.do_cli("backup", source, "nonexistent:" + source)
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessIn("nonexistent", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
return d


@@ -0,0 +1,415 @@
import os.path
import simplejson
from twisted.trial import unittest
from cStringIO import StringIO
from allmydata import uri
from allmydata.util import base32
from allmydata.util.encodingutil import quote_output, to_str
from allmydata.mutable.publish import MutableData
from allmydata.immutable import upload
from allmydata.scripts import debug
from .no_network import GridTestMixin
from .test_cli import CLITestMixin
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
def test_check(self):
self.basedir = "cli/Check/check"
self.set_up_grid()
c0 = self.g.clients[0]
DATA = "data" * 100
DATA_uploadable = MutableData(DATA)
d = c0.create_mutable_file(DATA_uploadable)
def _stash_uri(n):
self.uri = n.get_uri()
d.addCallback(_stash_uri)
d.addCallback(lambda ign: self.do_cli("check", self.uri))
def _check1((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("Summary: Healthy" in lines, out)
self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
d.addCallback(_check1)
d.addCallback(lambda ign: self.do_cli("check", "--raw", self.uri))
def _check2((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
data = simplejson.loads(out)
self.failUnlessReallyEqual(to_str(data["summary"]), "Healthy")
self.failUnlessReallyEqual(data["results"]["healthy"], True)
d.addCallback(_check2)
d.addCallback(lambda ign: c0.upload(upload.Data("literal", convergence="")))
def _stash_lit_uri(n):
self.lit_uri = n.get_uri()
d.addCallback(_stash_lit_uri)
d.addCallback(lambda ign: self.do_cli("check", self.lit_uri))
def _check_lit((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("Summary: Healthy (LIT)" in lines, out)
d.addCallback(_check_lit)
d.addCallback(lambda ign: self.do_cli("check", "--raw", self.lit_uri))
def _check_lit_raw((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
data = simplejson.loads(out)
self.failUnlessReallyEqual(data["results"]["healthy"], True)
d.addCallback(_check_lit_raw)
d.addCallback(lambda ign: c0.create_immutable_dirnode({}, convergence=""))
def _stash_lit_dir_uri(n):
self.lit_dir_uri = n.get_uri()
d.addCallback(_stash_lit_dir_uri)
d.addCallback(lambda ign: self.do_cli("check", self.lit_dir_uri))
d.addCallback(_check_lit)
d.addCallback(lambda ign: self.do_cli("check", "--raw", self.lit_uri))
d.addCallback(_check_lit_raw)
def _clobber_shares(ignored):
# delete one, corrupt a second
shares = self.find_uri_shares(self.uri)
self.failUnlessReallyEqual(len(shares), 10)
os.unlink(shares[0][2])
cso = debug.CorruptShareOptions()
cso.stdout = StringIO()
cso.parseOptions([shares[1][2]])
storage_index = uri.from_string(self.uri).get_storage_index()
self._corrupt_share_line = " server %s, SI %s, shnum %d" % \
(base32.b2a(shares[1][1]),
base32.b2a(storage_index),
shares[1][0])
debug.corrupt_share(cso)
d.addCallback(_clobber_shares)
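# one share deleted and one corrupted above, so 8 of the original 10 remain intact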
d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri))
def _check3((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
summary = [l for l in lines if l.startswith("Summary")][0]
self.failUnless("Summary: Unhealthy: 8 shares (enc 3-of-10)"
in summary, summary)
self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
self.failUnless(" corrupt shares:" in lines, out)
self.failUnless(self._corrupt_share_line in lines, out)
d.addCallback(_check3)
d.addCallback(lambda ign: self.do_cli("check", "--verify", "--raw", self.uri))
def _check3_raw((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
data = simplejson.loads(out)
self.failUnlessReallyEqual(data["results"]["healthy"], False)
self.failUnlessIn("Unhealthy: 8 shares (enc 3-of-10)", data["summary"])
self.failUnlessReallyEqual(data["results"]["count-shares-good"], 8)
self.failUnlessReallyEqual(data["results"]["count-corrupt-shares"], 1)
self.failUnlessIn("list-corrupt-shares", data["results"])
d.addCallback(_check3_raw)
d.addCallback(lambda ign:
self.do_cli("check", "--verify", "--repair", self.uri))
def _check4((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("Summary: not healthy" in lines, out)
self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
self.failUnless(" corrupt shares:" in lines, out)
self.failUnless(self._corrupt_share_line in lines, out)
self.failUnless(" repair successful" in lines, out)
d.addCallback(_check4)
d.addCallback(lambda ign:
self.do_cli("check", "--verify", "--repair", self.uri))
def _check5((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("Summary: healthy" in lines, out)
self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
self.failIf(" corrupt shares:" in lines, out)
d.addCallback(_check5)
return d
def test_deep_check(self):
self.basedir = "cli/Check/deep_check"
self.set_up_grid()
c0 = self.g.clients[0]
self.uris = {}
self.fileurls = {}
DATA = "data" * 100
quoted_good = quote_output(u"g\u00F6\u00F6d")
d = c0.create_dirnode()
def _stash_root_and_create_file(n):
self.rootnode = n
self.rooturi = n.get_uri()
return n.add_file(u"g\u00F6\u00F6d", upload.Data(DATA, convergence=""))
d.addCallback(_stash_root_and_create_file)
def _stash_uri(fn, which):
self.uris[which] = fn.get_uri()
return fn
d.addCallback(_stash_uri, u"g\u00F6\u00F6d")
d.addCallback(lambda ign:
self.rootnode.add_file(u"small",
upload.Data("literal",
convergence="")))
d.addCallback(_stash_uri, "small")
d.addCallback(lambda ign:
c0.create_mutable_file(MutableData(DATA+"1")))
d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
d.addCallback(_stash_uri, "mutable")
d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
def _check1((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
in lines, out)
d.addCallback(_check1)
# root
# root/g\u00F6\u00F6d
# root/small
# root/mutable
d.addCallback(lambda ign: self.do_cli("deep-check", "--verbose",
self.rooturi))
def _check2((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("'<root>': Healthy" in lines, out)
self.failUnless("'small': Healthy (LIT)" in lines, out)
self.failUnless((quoted_good + ": Healthy") in lines, out)
self.failUnless("'mutable': Healthy" in lines, out)
self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
in lines, out)
d.addCallback(_check2)
d.addCallback(lambda ign: self.do_cli("stats", self.rooturi))
def _check_stats((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnlessIn(" count-immutable-files: 1", lines)
self.failUnlessIn(" count-mutable-files: 1", lines)
self.failUnlessIn(" count-literal-files: 1", lines)
self.failUnlessIn(" count-directories: 1", lines)
self.failUnlessIn(" size-immutable-files: 400", lines)
self.failUnlessIn("Size Histogram:", lines)
self.failUnlessIn(" 4-10 : 1 (10 B, 10 B)", lines)
self.failUnlessIn(" 317-1000 : 1 (1000 B, 1000 B)", lines)
d.addCallback(_check_stats)
def _clobber_shares(ignored):
shares = self.find_uri_shares(self.uris[u"g\u00F6\u00F6d"])
self.failUnlessReallyEqual(len(shares), 10)
os.unlink(shares[0][2])
shares = self.find_uri_shares(self.uris["mutable"])
cso = debug.CorruptShareOptions()
cso.stdout = StringIO()
cso.parseOptions([shares[1][2]])
storage_index = uri.from_string(self.uris["mutable"]).get_storage_index()
self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \
(base32.b2a(shares[1][1]),
base32.b2a(storage_index),
shares[1][0])
debug.corrupt_share(cso)
d.addCallback(_clobber_shares)
# root
# root/g\u00F6\u00F6d [9 shares]
# root/small
# root/mutable [1 corrupt share]
d.addCallback(lambda ign:
self.do_cli("deep-check", "--verbose", self.rooturi))
def _check3((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("'<root>': Healthy" in lines, out)
self.failUnless("'small': Healthy (LIT)" in lines, out)
self.failUnless("'mutable': Healthy" in lines, out) # needs verifier
self.failUnless((quoted_good + ": Not Healthy: 9 shares (enc 3-of-10)") in lines, out)
self.failIf(self._corrupt_share_line in lines, out)
self.failUnless("done: 4 objects checked, 3 healthy, 1 unhealthy"
in lines, out)
d.addCallback(_check3)
d.addCallback(lambda ign:
self.do_cli("deep-check", "--verbose", "--verify",
self.rooturi))
def _check4((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("'<root>': Healthy" in lines, out)
self.failUnless("'small': Healthy (LIT)" in lines, out)
mutable = [l for l in lines if l.startswith("'mutable'")][0]
self.failUnless(mutable.startswith("'mutable': Unhealthy: 9 shares (enc 3-of-10)"),
mutable)
self.failUnless(self._corrupt_share_line in lines, out)
self.failUnless((quoted_good + ": Not Healthy: 9 shares (enc 3-of-10)") in lines, out)
self.failUnless("done: 4 objects checked, 2 healthy, 2 unhealthy"
in lines, out)
d.addCallback(_check4)
d.addCallback(lambda ign:
self.do_cli("deep-check", "--raw",
self.rooturi))
def _check5((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
units = [simplejson.loads(line) for line in lines]
# root, small, g\u00F6\u00F6d, mutable, stats
self.failUnlessReallyEqual(len(units), 4+1)
d.addCallback(_check5)
d.addCallback(lambda ign:
self.do_cli("deep-check",
"--verbose", "--verify", "--repair",
self.rooturi))
def _check6((rc, out, err)):
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(rc, 0)
lines = out.splitlines()
self.failUnless("'<root>': healthy" in lines, out)
self.failUnless("'small': healthy" in lines, out)
self.failUnless("'mutable': not healthy" in lines, out)
self.failUnless(self._corrupt_share_line in lines, out)
self.failUnless((quoted_good + ": not healthy") in lines, out)
self.failUnless("done: 4 objects checked" in lines, out)
self.failUnless(" pre-repair: 2 healthy, 2 unhealthy" in lines, out)
self.failUnless(" 2 repairs attempted, 2 successful, 0 failed"
in lines, out)
self.failUnless(" post-repair: 4 healthy, 0 unhealthy" in lines,out)
d.addCallback(_check6)
# now add a subdir, and a file below that, then make the subdir
# unrecoverable
d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"subdir"))
d.addCallback(_stash_uri, "subdir")
d.addCallback(lambda fn:
fn.add_file(u"subfile", upload.Data(DATA+"2", "")))
d.addCallback(lambda ign:
self.delete_shares_numbered(self.uris["subdir"],
range(10)))
# root
# root/g\u00F6\u00F6d
# root/small
# root/mutable
# root/subdir [unrecoverable: 0 shares]
# root/subfile
d.addCallback(lambda ign: self.do_cli("manifest", self.rooturi))
def _manifest_failed((rc, out, err)):
self.failIfEqual(rc, 0)
self.failUnlessIn("ERROR: UnrecoverableFileError", err)
# the fatal directory should still show up, as the last line
self.failUnlessIn(" subdir\n", out)
d.addCallback(_manifest_failed)
d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
def _deep_check_failed((rc, out, err)):
self.failIfEqual(rc, 0)
self.failUnlessIn("ERROR: UnrecoverableFileError", err)
# we want to make sure that the error indication is the last
# thing that gets emitted
self.failIf("done:" in out, out)
d.addCallback(_deep_check_failed)
# this test is disabled until the deep-repair response to an
# unrepairable directory is fixed. The failure-to-repair should not
# throw an exception, but the failure-to-traverse that follows
# should throw UnrecoverableFileError.
#d.addCallback(lambda ign:
# self.do_cli("deep-check", "--repair", self.rooturi))
#def _deep_check_repair_failed((rc, out, err)):
# self.failIfEqual(rc, 0)
# print err
# self.failUnlessIn("ERROR: UnrecoverableFileError", err)
# self.failIf("done:" in out, out)
#d.addCallback(_deep_check_repair_failed)
return d
def test_check_without_alias(self):
# 'tahoe check' should output a sensible error message if it needs to
# find the default alias and can't
self.basedir = "cli/Check/check_without_alias"
self.set_up_grid()
d = self.do_cli("check")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
d.addCallback(lambda ign: self.do_cli("deep-check"))
d.addCallback(_check)
return d
def test_check_with_nonexistent_alias(self):
# 'tahoe check' should output a sensible error message if it needs to
# find an alias and can't.
self.basedir = "cli/Check/check_with_nonexistent_alias"
self.set_up_grid()
d = self.do_cli("check", "nonexistent:")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessIn("nonexistent", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
return d
def test_check_with_multiple_aliases(self):
self.basedir = "cli/Check/check_with_multiple_aliases"
self.set_up_grid()
self.uriList = []
c0 = self.g.clients[0]
d = c0.create_dirnode()
def _stash_uri(n):
self.uriList.append(n.get_uri())
d.addCallback(_stash_uri)
d = c0.create_dirnode()
d.addCallback(_stash_uri)
d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], self.uriList[1]))
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
# ensure "Healthy" appears in the output for each of the two URIs
self.failUnlessIn("Healthy", out[:len(out)/2])
self.failUnlessIn("Healthy", out[len(out)/2:])
d.addCallback(_check)
d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], "nonexistent:"))
def _check2((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("Healthy", out)
self.failUnlessIn("error:", err)
self.failUnlessIn("nonexistent", err)
d.addCallback(_check2)
return d


@@ -0,0 +1,655 @@
import os.path, simplejson
from twisted.trial import unittest
from twisted.python import usage
from allmydata.scripts import cli
from allmydata.util import fileutil
from allmydata.util.encodingutil import (quote_output, get_io_encoding,
unicode_to_output, to_str)
from .no_network import GridTestMixin
from .test_cli import CLITestMixin
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
def test_not_enough_args(self):
o = cli.CpOptions()
self.failUnlessRaises(usage.UsageError,
o.parseOptions, ["onearg"])
def test_unicode_filename(self):
self.basedir = "cli/Cp/unicode_filename"
fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
try:
fn1_arg = fn1.encode(get_io_encoding())
artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
except UnicodeEncodeError:
raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
self.skip_if_cannot_represent_filename(fn1)
self.set_up_grid()
DATA1 = "unicode file content"
fileutil.write(fn1, DATA1)
fn2 = os.path.join(self.basedir, "Metallica")
DATA2 = "non-unicode file content"
fileutil.write(fn2, DATA2)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res: self.do_cli("cp", fn1_arg, "tahoe:"))
d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg))
d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA1))
d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA2))
d.addCallback(lambda res: self.do_cli("ls", "tahoe:"))
def _check((rc, out, err)):
try:
unicode_to_output(u"\u00C4rtonwall")
except UnicodeEncodeError:
self.failUnlessReallyEqual(rc, 1)
self.failUnlessReallyEqual(out, "Metallica\n")
self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
self.failUnlessIn("files whose names could not be converted", err)
else:
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"Metallica\n\u00C4rtonwall\n")
self.failUnlessReallyEqual(err, "")
d.addCallback(_check)
return d
def test_dangling_symlink_vs_recursion(self):
if not hasattr(os, 'symlink'):
raise unittest.SkipTest("Symlinks are not supported by Python on this platform.")
# cp -r on a directory containing a dangling symlink shouldn't assert
self.basedir = "cli/Cp/dangling_symlink_vs_recursion"
self.set_up_grid()
dn = os.path.join(self.basedir, "dir")
os.mkdir(dn)
fn = os.path.join(dn, "Fakebandica")
ln = os.path.join(dn, "link")
os.symlink(fn, ln)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res: self.do_cli("cp", "--recursive",
dn, "tahoe:"))
return d
def test_copy_using_filecap(self):
self.basedir = "cli/Cp/test_copy_using_filecap"
self.set_up_grid()
outdir = os.path.join(self.basedir, "outdir")
os.mkdir(outdir)
fn1 = os.path.join(self.basedir, "Metallica")
fn2 = os.path.join(outdir, "Not Metallica")
fn3 = os.path.join(outdir, "test2")
DATA1 = "puppies" * 10000
fileutil.write(fn1, DATA1)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda ign: self.do_cli("put", fn1))
def _put_file((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessIn("200 OK", err)
# keep track of the filecap
self.filecap = out.strip()
d.addCallback(_put_file)
# Let's try copying this to the disk using the filecap.
d.addCallback(lambda ign: self.do_cli("cp", self.filecap, fn2))
def _copy_file((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
results = fileutil.read(fn2)
self.failUnlessReallyEqual(results, DATA1)
d.addCallback(_copy_file)
# Test copying a filecap to local dir, which should fail without a
# destination filename (#761).
d.addCallback(lambda ign: self.do_cli("cp", self.filecap, outdir))
def _resp((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error: you must specify a destination filename",
err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_resp)
# Create a directory, linked at tahoe:test .
d.addCallback(lambda ign: self.do_cli("mkdir", "tahoe:test"))
def _get_dir((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.dircap = out.strip()
d.addCallback(_get_dir)
# Upload a file to the directory.
d.addCallback(lambda ign:
self.do_cli("put", fn1, "tahoe:test/test_file"))
d.addCallback(lambda (rc, out, err): self.failUnlessReallyEqual(rc, 0))
# Copying DIRCAP/filename to a local dir should work, because the
# destination filename can be inferred.
d.addCallback(lambda ign:
self.do_cli("cp", self.dircap + "/test_file", outdir))
def _get_resp((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
results = fileutil.read(os.path.join(outdir, "test_file"))
self.failUnlessReallyEqual(results, DATA1)
d.addCallback(_get_resp)
# ... and to an explicit filename different from the source filename.
d.addCallback(lambda ign:
self.do_cli("cp", self.dircap + "/test_file", fn3))
def _get_resp2((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
results = fileutil.read(fn3)
self.failUnlessReallyEqual(results, DATA1)
d.addCallback(_get_resp2)
# Test that the --verbose option prints correct indices (#1805).
d.addCallback(lambda ign:
self.do_cli("cp", "--verbose", fn3, self.dircap))
def _test_for_wrong_indices((rc, out, err)):
lines = err.split('\n')
self.failUnlessIn('examining 1 of 1', lines)
self.failUnlessIn('starting copy, 1 files, 1 directories', lines)
self.failIfIn('examining 0 of', err)
d.addCallback(_test_for_wrong_indices)
return d
def test_cp_with_nonexistent_alias(self):
# when invoked with an alias or aliases that don't exist, 'tahoe cp'
# should output a sensible error message rather than a stack trace.
self.basedir = "cli/Cp/cp_with_nonexistent_alias"
self.set_up_grid()
d = self.do_cli("cp", "fake:file1", "fake:file2")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
d.addCallback(_check)
# 'tahoe cp' actually processes the target argument first, so we need
# to check to make sure that validation extends to the source
# argument.
d.addCallback(lambda ign: self.do_cli("create-alias", "tahoe"))
d.addCallback(lambda ign: self.do_cli("cp", "fake:file1",
"tahoe:file2"))
d.addCallback(_check)
return d
def test_unicode_dirnames(self):
self.basedir = "cli/Cp/unicode_dirnames"
fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
try:
fn1_arg = fn1.encode(get_io_encoding())
del fn1_arg # hush pyflakes
artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
except UnicodeEncodeError:
raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
self.skip_if_cannot_represent_filename(fn1)
self.set_up_grid()
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res: self.do_cli("mkdir", "tahoe:test/" + artonwall_arg))
d.addCallback(lambda res: self.do_cli("cp", "-r", "tahoe:test", "tahoe:test2"))
d.addCallback(lambda res: self.do_cli("ls", "tahoe:test2/test"))
def _check((rc, out, err)):
try:
unicode_to_output(u"\u00C4rtonwall")
except UnicodeEncodeError:
self.failUnlessReallyEqual(rc, 1)
self.failUnlessReallyEqual(out, "")
self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
self.failUnlessIn("files whose names could not be converted", err)
else:
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"\u00C4rtonwall\n")
self.failUnlessReallyEqual(err, "")
d.addCallback(_check)
return d
def test_cp_replaces_mutable_file_contents(self):
self.basedir = "cli/Cp/cp_replaces_mutable_file_contents"
self.set_up_grid()
# Write a test file, which we'll copy to the grid.
test_txt_path = os.path.join(self.basedir, "test.txt")
test_txt_contents = "foo bar baz"
f = open(test_txt_path, "w")
f.write(test_txt_contents)
f.close()
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda ignored:
self.do_cli("mkdir", "tahoe:test"))
# We have to use 'tahoe put' here because 'tahoe cp' doesn't
# know how to make mutable files at the destination.
d.addCallback(lambda ignored:
self.do_cli("put", "--mutable", test_txt_path, "tahoe:test/test.txt"))
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test/test.txt"))
def _check((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(out, test_txt_contents)
d.addCallback(_check)
# We'll do ls --json to get the read uri and write uri for the
# file we've just uploaded.
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:test/test.txt"))
def _get_test_txt_uris((rc, out, err)):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "filenode")
self.failUnless(data['mutable'])
self.failUnlessIn("rw_uri", data)
self.rw_uri = to_str(data["rw_uri"])
self.failUnlessIn("ro_uri", data)
self.ro_uri = to_str(data["ro_uri"])
d.addCallback(_get_test_txt_uris)
# Now make a new file to copy in place of test.txt.
new_txt_path = os.path.join(self.basedir, "new.txt")
new_txt_contents = "baz bar foo" * 100000
f = open(new_txt_path, "w")
f.write(new_txt_contents)
f.close()
# Copy the new file on top of the old file.
d.addCallback(lambda ignored:
self.do_cli("cp", new_txt_path, "tahoe:test/test.txt"))
# If we get test.txt now, we should see the new data.
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test/test.txt"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, new_txt_contents))
# If we get the json of the new file, we should see that the old
# uri is there
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:test/test.txt"))
def _check_json((rc, out, err)):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "filenode")
self.failUnless(data['mutable'])
self.failUnlessIn("ro_uri", data)
self.failUnlessEqual(to_str(data["ro_uri"]), self.ro_uri)
self.failUnlessIn("rw_uri", data)
self.failUnlessEqual(to_str(data["rw_uri"]), self.rw_uri)
d.addCallback(_check_json)
# and, finally, doing a GET directly on one of the old uris
# should give us the new contents.
d.addCallback(lambda ignored:
self.do_cli("get", self.rw_uri))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, new_txt_contents))
# Now copy the old test.txt without an explicit destination
# file. tahoe cp will match it to the existing file and
# overwrite it appropriately.
d.addCallback(lambda ignored:
self.do_cli("cp", test_txt_path, "tahoe:test"))
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test/test.txt"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, test_txt_contents))
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:test/test.txt"))
d.addCallback(_check_json)
d.addCallback(lambda ignored:
self.do_cli("get", self.rw_uri))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, test_txt_contents))
# Now we'll make a more complicated directory structure.
# test2/
# test2/mutable1
# test2/mutable2
# test2/imm1
# test2/imm2
imm_test_txt_path = os.path.join(self.basedir, "imm_test.txt")
imm_test_txt_contents = test_txt_contents * 10000
fileutil.write(imm_test_txt_path, imm_test_txt_contents)
d.addCallback(lambda ignored:
self.do_cli("mkdir", "tahoe:test2"))
d.addCallback(lambda ignored:
self.do_cli("put", "--mutable", new_txt_path,
"tahoe:test2/mutable1"))
d.addCallback(lambda ignored:
self.do_cli("put", "--mutable", new_txt_path,
"tahoe:test2/mutable2"))
d.addCallback(lambda ignored:
self.do_cli('put', new_txt_path, "tahoe:test2/imm1"))
d.addCallback(lambda ignored:
self.do_cli("put", imm_test_txt_path, "tahoe:test2/imm2"))
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:test2"))
def _process_directory_json((rc, out, err)):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "dirnode")
self.failUnless(data['mutable'])
self.failUnlessIn("children", data)
children = data['children']
# Store the URIs for later use.
self.childuris = {}
for k in ["mutable1", "mutable2", "imm1", "imm2"]:
self.failUnlessIn(k, children)
childtype, childdata = children[k]
self.failUnlessEqual(childtype, "filenode")
if "mutable" in k:
self.failUnless(childdata['mutable'])
self.failUnlessIn("rw_uri", childdata)
uri_key = "rw_uri"
else:
self.failIf(childdata['mutable'])
self.failUnlessIn("ro_uri", childdata)
uri_key = "ro_uri"
self.childuris[k] = to_str(childdata[uri_key])
d.addCallback(_process_directory_json)
# Now build a local directory to copy into place, like the following:
# test2/
# test2/mutable1
# test2/mutable2
# test2/imm1
# test2/imm3
def _build_local_directory(ignored):
test2_path = os.path.join(self.basedir, "test2")
fileutil.make_dirs(test2_path)
for fn in ("mutable1", "mutable2", "imm1", "imm3"):
fileutil.write(os.path.join(test2_path, fn), fn * 1000)
self.test2_path = test2_path
d.addCallback(_build_local_directory)
d.addCallback(lambda ignored:
self.do_cli("cp", "-r", self.test2_path, "tahoe:"))
# We expect that mutable1 and mutable2 are overwritten in-place,
# so they'll retain their URIs but have different content.
def _process_file_json((rc, out, err), fn):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "filenode")
if "mutable" in fn:
self.failUnless(data['mutable'])
self.failUnlessIn("rw_uri", data)
self.failUnlessEqual(to_str(data["rw_uri"]), self.childuris[fn])
else:
self.failIf(data['mutable'])
self.failUnlessIn("ro_uri", data)
self.failIfEqual(to_str(data["ro_uri"]), self.childuris[fn])
for fn in ("mutable1", "mutable2"):
d.addCallback(lambda ignored, fn=fn:
self.do_cli("get", "tahoe:test2/%s" % fn))
d.addCallback(lambda (rc, out, err), fn=fn:
self.failUnlessEqual(out, fn * 1000))
d.addCallback(lambda ignored, fn=fn:
self.do_cli("ls", "--json", "tahoe:test2/%s" % fn))
d.addCallback(_process_file_json, fn=fn)
# imm1 should have been replaced, so both its uri and content
# should be different.
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test2/imm1"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, "imm1" * 1000))
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:test2/imm1"))
d.addCallback(_process_file_json, fn="imm1")
# imm3 should have been created.
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test2/imm3"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, "imm3" * 1000))
# imm2 should be exactly as we left it, since our newly-copied
# directory didn't contain an imm2 entry.
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test2/imm2"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, imm_test_txt_contents))
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:test2/imm2"))
def _process_imm2_json((rc, out, err)):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "filenode")
self.failIf(data['mutable'])
self.failUnlessIn("ro_uri", data)
self.failUnlessEqual(to_str(data["ro_uri"]), self.childuris["imm2"])
d.addCallback(_process_imm2_json)
return d
def test_cp_overwrite_readonly_mutable_file(self):
# tahoe cp should print an error when asked to overwrite a
# mutable file that it can't overwrite.
self.basedir = "cli/Cp/overwrite_readonly_mutable_file"
self.set_up_grid()
# This is our initial file. We'll link its readcap into the
# tahoe: alias.
test_file_path = os.path.join(self.basedir, "test_file.txt")
test_file_contents = "This is a test file."
fileutil.write(test_file_path, test_file_contents)
# This is our replacement file. We'll try and fail to upload it
# over the readcap that we linked into the tahoe: alias.
replacement_file_path = os.path.join(self.basedir, "replacement.txt")
replacement_file_contents = "These are new contents."
fileutil.write(replacement_file_path, replacement_file_contents)
d = self.do_cli("create-alias", "tahoe:")
d.addCallback(lambda ignored:
self.do_cli("put", "--mutable", test_file_path))
def _get_test_uri((rc, out, err)):
self.failUnlessEqual(rc, 0)
# this should be a write uri
self._test_write_uri = out
d.addCallback(_get_test_uri)
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", self._test_write_uri))
def _process_test_json((rc, out, err)):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "filenode")
self.failUnless(data['mutable'])
self.failUnlessIn("ro_uri", data)
self._test_read_uri = to_str(data["ro_uri"])
d.addCallback(_process_test_json)
# Now we'll link the readonly URI into the tahoe: alias.
d.addCallback(lambda ignored:
self.do_cli("ln", self._test_read_uri, "tahoe:test_file.txt"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(rc, 0))
# Let's grab the json of that to make sure that we did it right.
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:"))
def _process_tahoe_json((rc, out, err)):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "dirnode")
self.failUnlessIn("children", data)
kiddata = data['children']
self.failUnlessIn("test_file.txt", kiddata)
testtype, testdata = kiddata['test_file.txt']
self.failUnlessEqual(testtype, "filenode")
self.failUnless(testdata['mutable'])
self.failUnlessIn("ro_uri", testdata)
self.failUnlessEqual(to_str(testdata["ro_uri"]), self._test_read_uri)
self.failIfIn("rw_uri", testdata)
d.addCallback(_process_tahoe_json)
# Okay, now we're going to try uploading another mutable file in
# place of that one. We should get an error.
d.addCallback(lambda ignored:
self.do_cli("cp", replacement_file_path, "tahoe:test_file.txt"))
def _check_error_message((rc, out, err)):
self.failUnlessEqual(rc, 1)
self.failUnlessIn("replace or update requested with read-only cap", err)
d.addCallback(_check_error_message)
# Make extra sure that that didn't work.
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test_file.txt"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, test_file_contents))
d.addCallback(lambda ignored:
self.do_cli("get", self._test_read_uri))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, test_file_contents))
# Now we'll do it without an explicit destination.
d.addCallback(lambda ignored:
self.do_cli("cp", test_file_path, "tahoe:"))
d.addCallback(_check_error_message)
d.addCallback(lambda ignored:
self.do_cli("get", "tahoe:test_file.txt"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, test_file_contents))
d.addCallback(lambda ignored:
self.do_cli("get", self._test_read_uri))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(out, test_file_contents))
# Now we'll link a readonly file into a subdirectory.
d.addCallback(lambda ignored:
self.do_cli("mkdir", "tahoe:testdir"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(rc, 0))
d.addCallback(lambda ignored:
self.do_cli("ln", self._test_read_uri, "tahoe:test/file2.txt"))
d.addCallback(lambda (rc, out, err):
self.failUnlessEqual(rc, 0))
test_dir_path = os.path.join(self.basedir, "test")
fileutil.make_dirs(test_dir_path)
for f in ("file1.txt", "file2.txt"):
fileutil.write(os.path.join(test_dir_path, f), f * 10000)
d.addCallback(lambda ignored:
self.do_cli("cp", "-r", test_dir_path, "tahoe:"))
d.addCallback(_check_error_message)
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", "tahoe:test"))
def _got_testdir_json((rc, out, err)):
self.failUnlessEqual(rc, 0)
filetype, data = simplejson.loads(out)
self.failUnlessEqual(filetype, "dirnode")
self.failUnlessIn("children", data)
childdata = data['children']
self.failUnlessIn("file2.txt", childdata)
file2type, file2data = childdata['file2.txt']
self.failUnlessEqual(file2type, "filenode")
self.failUnless(file2data['mutable'])
self.failUnlessIn("ro_uri", file2data)
self.failUnlessEqual(to_str(file2data["ro_uri"]), self._test_read_uri)
self.failIfIn("rw_uri", file2data)
d.addCallback(_got_testdir_json)
return d
def test_cp_verbose(self):
self.basedir = "cli/Cp/cp_verbose"
self.set_up_grid()
# Write two test files, which we'll copy to the grid.
test1_path = os.path.join(self.basedir, "test1")
test2_path = os.path.join(self.basedir, "test2")
fileutil.write(test1_path, "test1")
fileutil.write(test2_path, "test2")
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda ign:
self.do_cli("cp", "--verbose", test1_path, test2_path, "tahoe:"))
def _check(res):
(rc, out, err) = res
self.failUnlessEqual(rc, 0, str(res))
self.failUnlessIn("Success: files copied", out, str(res))
self.failUnlessEqual(err, """\
attaching sources to targets, 2 files / 0 dirs in root
targets assigned, 1 dirs, 2 files
starting copy, 2 files, 1 directories
1/2 files, 0/1 directories
2/2 files, 0/1 directories
1/1 directories
""", str(res))
d.addCallback(_check)
return d
def test_cp_copies_dir(self):
# This test ensures that a directory is copied using
# tahoe cp -r. Refer to ticket #712:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/712
self.basedir = "cli/Cp/cp_copies_dir"
self.set_up_grid()
subdir = os.path.join(self.basedir, "foo")
os.mkdir(subdir)
test1_path = os.path.join(subdir, "test1")
fileutil.write(test1_path, "test1")
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda ign:
self.do_cli("cp", "-r", subdir, "tahoe:"))
d.addCallback(lambda ign:
self.do_cli("ls", "tahoe:"))
def _check(res, item):
(rc, out, err) = res
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(err, "")
self.failUnlessIn(item, out, str(res))
d.addCallback(_check, "foo")
d.addCallback(lambda ign:
self.do_cli("ls", "tahoe:foo/"))
d.addCallback(_check, "test1")
d.addCallback(lambda ign: fileutil.rm_dir(subdir))
d.addCallback(lambda ign: self.do_cli("cp", "-r", "tahoe:foo", self.basedir))
def _check_local_fs(ign):
self.failUnless(os.path.isdir(self.basedir))
self.failUnless(os.path.isfile(test1_path))
d.addCallback(_check_local_fs)
return d
def test_ticket_2027(self):
# This test ensures that tahoe will copy a file from the grid to
# a local directory without a specified file name.
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2027
self.basedir = "cli/Cp/cp_verbose"
self.set_up_grid()
# Write a test file, which we'll copy to the grid.
test1_path = os.path.join(self.basedir, "test1")
fileutil.write(test1_path, "test1")
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda ign:
self.do_cli("cp", test1_path, "tahoe:"))
d.addCallback(lambda ign:
self.do_cli("cp", "tahoe:test1", self.basedir))
def _check(res):
(rc, out, err) = res
self.failUnlessIn("Success: file copied", out, str(res))
d.addCallback(_check)
return d


@@ -0,0 +1,209 @@
import os.path
from twisted.trial import unittest
import urllib
from allmydata.util import fileutil
from allmydata.scripts.common import get_aliases
from allmydata.scripts import cli, runner
from allmydata.test.no_network import GridTestMixin
from allmydata.util.encodingutil import quote_output, get_io_encoding
from .test_cli import CLITestMixin
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
def _test_webopen(self, args, expected_url):
o = runner.Options()
o.parseOptions(["--node-directory", self.get_clientdir(), "webopen"]
+ list(args))
urls = []
rc = cli.webopen(o, urls.append)
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(len(urls), 1)
self.failUnlessReallyEqual(urls[0], expected_url)
def test_create(self):
self.basedir = "cli/CreateAlias/create"
self.set_up_grid()
aliasfile = os.path.join(self.get_clientdir(), "private", "aliases")
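# private/aliases holds one "name: dircap" entry per line; it is what
# 'tahoe create-alias' and 'tahoe add-alias' append to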
d = self.do_cli("create-alias", "tahoe")
def _done((rc,stdout,stderr)):
self.failUnless("Alias 'tahoe' created" in stdout)
self.failIf(stderr)
aliases = get_aliases(self.get_clientdir())
self.failUnless("tahoe" in aliases)
self.failUnless(aliases["tahoe"].startswith("URI:DIR2:"))
d.addCallback(_done)
d.addCallback(lambda res: self.do_cli("create-alias", "two:"))
def _stash_urls(res):
aliases = get_aliases(self.get_clientdir())
node_url_file = os.path.join(self.get_clientdir(), "node.url")
nodeurl = fileutil.read(node_url_file).strip()
self.welcome_url = nodeurl
uribase = nodeurl + "uri/"
self.tahoe_url = uribase + urllib.quote(aliases["tahoe"])
self.tahoe_subdir_url = self.tahoe_url + "/subdir"
self.two_url = uribase + urllib.quote(aliases["two"])
self.two_uri = aliases["two"]
d.addCallback(_stash_urls)
d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
def _check_create_duplicate((rc,stdout,stderr)):
self.failIfEqual(rc, 0)
self.failUnless("Alias 'two' already exists!" in stderr)
aliases = get_aliases(self.get_clientdir())
self.failUnlessReallyEqual(aliases["two"], self.two_uri)
d.addCallback(_check_create_duplicate)
d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
def _check_add((rc,stdout,stderr)):
self.failUnlessReallyEqual(rc, 0)
self.failUnless("Alias 'added' added" in stdout)
d.addCallback(_check_add)
# check add-alias with a duplicate
d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
def _check_add_duplicate((rc,stdout,stderr)):
self.failIfEqual(rc, 0)
self.failUnless("Alias 'two' already exists!" in stderr)
aliases = get_aliases(self.get_clientdir())
self.failUnlessReallyEqual(aliases["two"], self.two_uri)
d.addCallback(_check_add_duplicate)
# check create-alias and add-alias with invalid aliases
def _check_invalid((rc,stdout,stderr)):
self.failIfEqual(rc, 0)
self.failUnlessIn("cannot contain", stderr)
for invalid in ['foo:bar', 'foo bar', 'foobar::']:
d.addCallback(lambda res, invalid=invalid: self.do_cli("create-alias", invalid))
d.addCallback(_check_invalid)
d.addCallback(lambda res, invalid=invalid: self.do_cli("add-alias", invalid, self.two_uri))
d.addCallback(_check_invalid)
def _test_urls(junk):
self._test_webopen([], self.welcome_url)
self._test_webopen(["/"], self.tahoe_url)
self._test_webopen(["tahoe:"], self.tahoe_url)
self._test_webopen(["tahoe:/"], self.tahoe_url)
self._test_webopen(["tahoe:subdir"], self.tahoe_subdir_url)
self._test_webopen(["-i", "tahoe:subdir"],
self.tahoe_subdir_url+"?t=info")
self._test_webopen(["tahoe:subdir/"], self.tahoe_subdir_url + '/')
self._test_webopen(["tahoe:subdir/file"],
self.tahoe_subdir_url + '/file')
self._test_webopen(["--info", "tahoe:subdir/file"],
self.tahoe_subdir_url + '/file?t=info')
# if "file" is indeed a file, then the url produced by webopen in
# this case is disallowed by the webui. but by design, webopen
# passes through the mistake from the user to the resultant
# webopened url
self._test_webopen(["tahoe:subdir/file/"], self.tahoe_subdir_url + '/file/')
self._test_webopen(["two:"], self.two_url)
d.addCallback(_test_urls)
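The _test_urls checks above reduce to one mapping: webopen turns an alias path into node.url + "uri/" + the URL-quoted dircap, optionally followed by "/" + path, with -i/--info appending "?t=info". A small standalone sketch of that mapping (an illustration of what the assertions encode, not the code in allmydata.scripts):

import urllib

def expected_webopen_url(nodeurl, dircap, path="", info=False):
    # mirrors how self.tahoe_url and self.tahoe_subdir_url are built above
    url = nodeurl + "uri/" + urllib.quote(dircap)
    if path:
        url = url + "/" + path
    if info:
        url = url + "?t=info"
    return url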
def _remove_trailing_newline_and_create_alias(ign):
# ticket #741 is about a manually-edited alias file (which
# doesn't end in a newline) being corrupted by a subsequent
# "tahoe create-alias"
old = fileutil.read(aliasfile)
fileutil.write(aliasfile, old.rstrip())
return self.do_cli("create-alias", "un-corrupted1")
d.addCallback(_remove_trailing_newline_and_create_alias)
def _check_not_corrupted1((rc,stdout,stderr)):
self.failUnless("Alias 'un-corrupted1' created" in stdout, stdout)
self.failIf(stderr)
# the old behavior was to simply append the new record, causing a
# line that looked like "NAME1: CAP1NAME2: CAP2". This won't look
# like a valid dircap, so get_aliases() will raise an exception.
aliases = get_aliases(self.get_clientdir())
self.failUnless("added" in aliases)
self.failUnless(aliases["added"].startswith("URI:DIR2:"))
# to be safe, let's confirm that we don't see "NAME2:" in CAP1.
# No chance of a false-negative, because the hyphen in
# "un-corrupted1" is not a valid base32 character.
self.failIfIn("un-corrupted1:", aliases["added"])
self.failUnless("un-corrupted1" in aliases)
self.failUnless(aliases["un-corrupted1"].startswith("URI:DIR2:"))
d.addCallback(_check_not_corrupted1)
def _remove_trailing_newline_and_add_alias(ign):
# same thing, but for "tahoe add-alias"
old = fileutil.read(aliasfile)
fileutil.write(aliasfile, old.rstrip())
return self.do_cli("add-alias", "un-corrupted2", self.two_uri)
d.addCallback(_remove_trailing_newline_and_add_alias)
def _check_not_corrupted((rc,stdout,stderr)):
self.failUnless("Alias 'un-corrupted2' added" in stdout, stdout)
self.failIf(stderr)
aliases = get_aliases(self.get_clientdir())
self.failUnless("un-corrupted1" in aliases)
self.failUnless(aliases["un-corrupted1"].startswith("URI:DIR2:"))
self.failIfIn("un-corrupted2:", aliases["un-corrupted1"])
self.failUnless("un-corrupted2" in aliases)
self.failUnless(aliases["un-corrupted2"].startswith("URI:DIR2:"))
d.addCallback(_check_not_corrupted)
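The #741 scenario exercised above is an aliases file whose last line has lost its trailing newline: blindly appending a new record then fuses two entries into a single line ("NAME1: CAP1NAME2: CAP2") that get_aliases() cannot parse. A newline-safe append looks roughly like this (an illustration of the behavior being tested, not the actual code in allmydata.scripts.common):

def append_alias(aliasfile, name, cap):
    old = fileutil.read(aliasfile)
    if old and not old.endswith("\n"):
        # repair a hand-edited file before appending, so two records can
        # never run together on one line
        old += "\n"
    fileutil.write(aliasfile, old + "%s: %s\n" % (name, cap))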
def test_create_unicode(self):
self.basedir = "cli/CreateAlias/create_unicode"
self.set_up_grid()
try:
etudes_arg = u"\u00E9tudes".encode(get_io_encoding())
lumiere_arg = u"lumi\u00E8re.txt".encode(get_io_encoding())
except UnicodeEncodeError:
raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
d = self.do_cli("create-alias", etudes_arg)
def _check_create_unicode((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
self.failUnlessIn("Alias %s created" % quote_output(u"\u00E9tudes"), out)
aliases = get_aliases(self.get_clientdir())
self.failUnless(aliases[u"\u00E9tudes"].startswith("URI:DIR2:"))
d.addCallback(_check_create_unicode)
d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":"))
def _check_ls1((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(out, "")
d.addCallback(_check_ls1)
d.addCallback(lambda res: self.do_cli("put", "-", etudes_arg + ":uploaded.txt",
stdin="Blah blah blah"))
d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":"))
def _check_ls2((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(out, "uploaded.txt\n")
d.addCallback(_check_ls2)
d.addCallback(lambda res: self.do_cli("get", etudes_arg + ":uploaded.txt"))
def _check_get((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(out, "Blah blah blah")
d.addCallback(_check_get)
# Ensure that a Unicode filename in a Unicode alias works as expected
d.addCallback(lambda res: self.do_cli("put", "-", etudes_arg + ":" + lumiere_arg,
stdin="Let the sunshine In!"))
d.addCallback(lambda res: self.do_cli("get",
get_aliases(self.get_clientdir())[u"\u00E9tudes"] + "/" + lumiere_arg))
def _check_get2((rc, out, err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(out, "Let the sunshine In!")
d.addCallback(_check_get2)
return d
# TODO: test list-aliases, including Unicode
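A rough sketch of the TODO'd list-aliases coverage, assuming only that "tahoe list-aliases" exits 0 and mentions each alias name on stdout (the exact output format is deliberately left unasserted):

def test_list_aliases(self):
    self.basedir = "cli/CreateAlias/list_aliases"
    self.set_up_grid()
    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda ign: self.do_cli("list-aliases"))
    def _check((rc, out, err)):
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessIn("tahoe", out)
    d.addCallback(_check)
    return d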


@@ -0,0 +1,280 @@
from twisted.trial import unittest
from twisted.internet import defer
from allmydata.immutable import upload
from allmydata.interfaces import MDMF_VERSION, SDMF_VERSION
from allmydata.mutable.publish import MutableData
from allmydata.test.no_network import GridTestMixin
from allmydata.util.encodingutil import quote_output, get_io_encoding
from .test_cli import CLITestMixin
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
class List(GridTestMixin, CLITestMixin, unittest.TestCase):
def test_list(self):
self.basedir = "cli/List/list"
self.set_up_grid()
c0 = self.g.clients[0]
small = "small"
# u"g\u00F6\u00F6d" might not be representable in the argv and/or output encodings.
# It is initially included in the directory in any case.
try:
good_arg = u"g\u00F6\u00F6d".encode(get_io_encoding())
except UnicodeEncodeError:
good_arg = None
try:
good_out = u"g\u00F6\u00F6d".encode(get_io_encoding())
except UnicodeEncodeError:
good_out = None
d = c0.create_dirnode()
def _stash_root_and_create_file(n):
self.rootnode = n
self.rooturi = n.get_uri()
return n.add_file(u"g\u00F6\u00F6d", upload.Data(small, convergence=""))
d.addCallback(_stash_root_and_create_file)
def _stash_goodcap(n):
self.goodcap = n.get_uri()
d.addCallback(_stash_goodcap)
d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"1share"))
d.addCallback(lambda n:
self.delete_shares_numbered(n.get_uri(), range(1,10)))
d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"0share"))
d.addCallback(lambda n:
self.delete_shares_numbered(n.get_uri(), range(0,10)))
d.addCallback(lambda ign:
self.do_cli("add-alias", "tahoe", self.rooturi))
d.addCallback(lambda ign: self.do_cli("ls"))
def _check1((rc,out,err)):
if good_out is None:
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("files whose names could not be converted", err)
self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err)
self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share"]))
else:
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share", good_out]))
d.addCallback(_check1)
d.addCallback(lambda ign: self.do_cli("ls", "missing"))
def _check2((rc,out,err)):
self.failIfEqual(rc, 0)
self.failUnlessReallyEqual(err.strip(), "No such file or directory")
self.failUnlessReallyEqual(out, "")
d.addCallback(_check2)
d.addCallback(lambda ign: self.do_cli("ls", "1share"))
def _check3((rc,out,err)):
self.failIfEqual(rc, 0)
self.failUnlessIn("Error during GET: 410 Gone", err)
self.failUnlessIn("UnrecoverableFileError:", err)
self.failUnlessIn("could not be retrieved, because there were "
"insufficient good shares.", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check3)
d.addCallback(lambda ign: self.do_cli("ls", "0share"))
d.addCallback(_check3)
def _check4((rc, out, err)):
if good_out is None:
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("files whose names could not be converted", err)
self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err)
self.failUnlessReallyEqual(out, "")
else:
# listing a file (as dir/filename) should have the edge metadata,
# including the filename
self.failUnlessReallyEqual(rc, 0)
self.failUnlessIn(good_out, out)
self.failIfIn("-r-- %d -" % len(small), out,
"trailing hyphen means unknown date")
if good_arg is not None:
d.addCallback(lambda ign: self.do_cli("ls", "-l", good_arg))
d.addCallback(_check4)
# listing a file as $DIRCAP/filename should work just like dir/filename
d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + "/" + good_arg))
d.addCallback(_check4)
# and similarly for $DIRCAP:./filename
d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + ":./" + good_arg))
d.addCallback(_check4)
def _check5((rc, out, err)):
# listing a raw filecap should not explode, but it will have no
# metadata, just the size
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual("-r-- %d -" % len(small), out.strip())
d.addCallback(lambda ign: self.do_cli("ls", "-l", self.goodcap))
d.addCallback(_check5)
# Now rename 'g\u00F6\u00F6d' to 'good' and repeat the tests that might have been skipped due
# to encoding problems.
d.addCallback(lambda ign: self.rootnode.move_child_to(u"g\u00F6\u00F6d", self.rootnode, u"good"))
d.addCallback(lambda ign: self.do_cli("ls"))
def _check1_ascii((rc,out,err)):
self.failUnlessReallyEqual(rc, 0)
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share", "good"]))
d.addCallback(_check1_ascii)
def _check4_ascii((rc, out, err)):
# listing a file (as dir/filename) should have the edge metadata,
# including the filename
self.failUnlessReallyEqual(rc, 0)
self.failUnlessIn("good", out)
self.failIfIn("-r-- %d -" % len(small), out,
"trailing hyphen means unknown date")
d.addCallback(lambda ign: self.do_cli("ls", "-l", "good"))
d.addCallback(_check4_ascii)
# listing a file as $DIRCAP/filename should work just like dir/filename
d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + "/good"))
d.addCallback(_check4_ascii)
# and similarly for $DIRCAP:./filename
d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + ":./good"))
d.addCallback(_check4_ascii)
unknown_immcap = "imm.URI:unknown"
def _create_unknown(ign):
nm = c0.nodemaker
kids = {u"unknownchild-imm": (nm.create_from_cap(unknown_immcap), {})}
return self.rootnode.create_subdirectory(u"unknown", initial_children=kids,
mutable=False)
d.addCallback(_create_unknown)
def _check6((rc, out, err)):
# listing a directory referencing an unknown object should print
# an extra message to stderr
self.failUnlessReallyEqual(rc, 0)
self.failUnlessIn("?r-- ? - unknownchild-imm\n", out)
self.failUnlessIn("included unknown objects", err)
d.addCallback(lambda ign: self.do_cli("ls", "-l", "unknown"))
d.addCallback(_check6)
def _check7((rc, out, err)):
# listing an unknown cap directly should print an extra message
# to stderr (currently this only works if the URI starts with 'URI:'
# after any 'ro.' or 'imm.' prefix, otherwise it will be confused
# with an alias).
self.failUnlessReallyEqual(rc, 0)
self.failUnlessIn("?r-- ? -\n", out)
self.failUnlessIn("included unknown objects", err)
d.addCallback(lambda ign: self.do_cli("ls", "-l", unknown_immcap))
d.addCallback(_check7)
return d
def test_list_without_alias(self):
# doing just 'tahoe ls' without specifying an alias or first
# doing 'tahoe create-alias tahoe' should fail gracefully.
self.basedir = "cli/List/list_without_alias"
self.set_up_grid()
d = self.do_cli("ls")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
return d
def test_list_with_nonexistent_alias(self):
# doing 'tahoe ls' while specifying an alias that doesn't already
# exist should fail with an informative error message
self.basedir = "cli/List/list_with_nonexistent_alias"
self.set_up_grid()
d = self.do_cli("ls", "nonexistent:")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessIn("nonexistent", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
return d
def _create_directory_structure(self):
# Create a simple directory structure that we can use for MDMF,
# SDMF, and immutable testing.
assert self.g
client = self.g.clients[0]
# Create a dirnode
d = client.create_dirnode()
def _got_rootnode(n):
# Add a few nodes.
self._dircap = n.get_uri()
nm = n._nodemaker
# The uploaders may run at the same time, so we need two
# MutableData instances or they'll fight over offsets &c and
# break.
mutable_data = MutableData("data" * 100000)
mutable_data2 = MutableData("data" * 100000)
# Add both kinds of mutable node.
d1 = nm.create_mutable_file(mutable_data,
version=MDMF_VERSION)
d2 = nm.create_mutable_file(mutable_data2,
version=SDMF_VERSION)
# Add an immutable node. We do this through the directory,
# with add_file.
immutable_data = upload.Data("immutable data" * 100000,
convergence="")
d3 = n.add_file(u"immutable", immutable_data)
ds = [d1, d2, d3]
dl = defer.DeferredList(ds)
def _made_files((r1, r2, r3)):
self.failUnless(r1[0])
self.failUnless(r2[0])
self.failUnless(r3[0])
# r1, r2, and r3 contain nodes.
mdmf_node = r1[1]
sdmf_node = r2[1]
imm_node = r3[1]
self._mdmf_uri = mdmf_node.get_uri()
self._mdmf_readonly_uri = mdmf_node.get_readonly_uri()
self._sdmf_uri = sdmf_node.get_uri()
self._sdmf_readonly_uri = sdmf_node.get_readonly_uri()
self._imm_uri = imm_node.get_uri()
d1 = n.set_node(u"mdmf", mdmf_node)
d2 = n.set_node(u"sdmf", sdmf_node)
return defer.DeferredList([d1, d2])
# We can now list the directory by listing self._dircap.
dl.addCallback(_made_files)
return dl
d.addCallback(_got_rootnode)
return d
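_made_files above relies on how defer.DeferredList reports results: it fires with a list of (success, result) pairs, one per wrapped Deferred, which is why each rN[0] is truth-checked and the node is then taken from rN[1]. A self-contained sketch of that pattern:

from twisted.internet import defer

def deferredlist_demo():
    d1 = defer.succeed("first")
    d2 = defer.succeed("second")
    dl = defer.DeferredList([d1, d2])
    def _got(results):
        # results is [(True, "first"), (True, "second")]
        for ok, value in results:
            assert ok, value
        return [value for (ok, value) in results]
    dl.addCallback(_got)
    return dl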
def test_list_mdmf(self):
# 'tahoe ls' should include MDMF files.
self.basedir = "cli/List/list_mdmf"
self.set_up_grid()
d = self._create_directory_structure()
d.addCallback(lambda ignored:
self.do_cli("ls", self._dircap))
def _got_ls((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(err, "")
self.failUnlessIn("immutable", out)
self.failUnlessIn("mdmf", out)
self.failUnlessIn("sdmf", out)
d.addCallback(_got_ls)
return d
def test_list_mdmf_json(self):
# 'tahoe ls' should include MDMF caps when invoked with MDMF
# caps.
self.basedir = "cli/List/list_mdmf_json"
self.set_up_grid()
d = self._create_directory_structure()
d.addCallback(lambda ignored:
self.do_cli("ls", "--json", self._dircap))
def _got_json((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(err, "")
self.failUnlessIn(self._mdmf_uri, out)
self.failUnlessIn(self._mdmf_readonly_uri, out)
self.failUnlessIn(self._sdmf_uri, out)
self.failUnlessIn(self._sdmf_readonly_uri, out)
self.failUnlessIn(self._imm_uri, out)
self.failUnlessIn('"format": "SDMF"', out)
self.failUnlessIn('"format": "MDMF"', out)
d.addCallback(_got_json)
return d


@@ -0,0 +1,197 @@
import os.path
from twisted.trial import unittest
from allmydata.util import fileutil
from allmydata.test.no_network import GridTestMixin
from allmydata.scripts import tahoe_mv
from .test_cli import CLITestMixin
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
def test_mv_behavior(self):
self.basedir = "cli/Mv/mv_behavior"
self.set_up_grid()
fn1 = os.path.join(self.basedir, "file1")
DATA1 = "Nuclear launch codes"
fileutil.write(fn1, DATA1)
fn2 = os.path.join(self.basedir, "file2")
DATA2 = "UML diagrams"
fileutil.write(fn2, DATA2)
# copy both files to the grid
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res:
self.do_cli("cp", fn1, "tahoe:"))
d.addCallback(lambda res:
self.do_cli("cp", fn2, "tahoe:"))
# do mv file1 file3
# (we should be able to rename files)
d.addCallback(lambda res:
self.do_cli("mv", "tahoe:file1", "tahoe:file3"))
d.addCallback(lambda (rc, out, err):
self.failUnlessIn("OK", out, "mv didn't rename a file"))
# do mv file3 file2
# (This should succeed without issue)
d.addCallback(lambda res:
self.do_cli("mv", "tahoe:file3", "tahoe:file2"))
# Out should contain "OK" to show that the transfer worked.
d.addCallback(lambda (rc,out,err):
self.failUnlessIn("OK", out, "mv didn't output OK after mving"))
# Next, make a remote directory.
d.addCallback(lambda res:
self.do_cli("mkdir", "tahoe:directory"))
# mv file2 directory
# (should fail with a descriptive error message; the CLI mv
# client should support this)
d.addCallback(lambda res:
self.do_cli("mv", "tahoe:file2", "tahoe:directory"))
d.addCallback(lambda (rc, out, err):
self.failUnlessIn(
"Error: You can't overwrite a directory with a file", err,
"mv shouldn't overwrite directories" ))
# mv file2 directory/
# (should succeed by making file2 a child node of directory)
d.addCallback(lambda res:
self.do_cli("mv", "tahoe:file2", "tahoe:directory/"))
# We should see an "OK"...
d.addCallback(lambda (rc, out, err):
self.failUnlessIn("OK", out,
"mv didn't mv a file into a directory"))
# ... and be able to GET the file
d.addCallback(lambda res:
self.do_cli("get", "tahoe:directory/file2", self.basedir + "new"))
d.addCallback(lambda (rc, out, err):
self.failUnless(os.path.exists(self.basedir + "new"),
"mv didn't write the destination file"))
# ... and not find the file where it was before.
d.addCallback(lambda res:
self.do_cli("get", "tahoe:file2", "file2"))
d.addCallback(lambda (rc, out, err):
self.failUnlessIn("404", err,
"mv left the source file intact"))
# Let's build:
# directory/directory2/some_file
# directory3
d.addCallback(lambda res:
self.do_cli("mkdir", "tahoe:directory/directory2"))
d.addCallback(lambda res:
self.do_cli("cp", fn2, "tahoe:directory/directory2/some_file"))
d.addCallback(lambda res:
self.do_cli("mkdir", "tahoe:directory3"))
# Let's now try to mv directory/directory2/some_file to
# directory3/some_file
d.addCallback(lambda res:
self.do_cli("mv", "tahoe:directory/directory2/some_file",
"tahoe:directory3/"))
# We should have just some_file in tahoe:directory3
d.addCallback(lambda res:
self.do_cli("get", "tahoe:directory3/some_file", "some_file"))
d.addCallback(lambda (rc, out, err):
self.failUnless("404" not in err,
"mv didn't handle nested directories correctly"))
d.addCallback(lambda res:
self.do_cli("get", "tahoe:directory3/directory", "directory"))
d.addCallback(lambda (rc, out, err):
self.failUnlessIn("404", err,
"mv moved the wrong thing"))
return d
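The repeated failUnlessIn("OK", out, ...) checks in test_mv_behavior could arguably be folded into one helper; a refactoring sketch, not part of this commit:

def _assert_mv_ok(self, (rc, out, err), why):
    # "tahoe mv" reports success by printing "OK" on stdout
    self.failUnlessIn("OK", out, why)

# usage:  d.addCallback(self._assert_mv_ok, "mv didn't rename a file")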
def test_mv_error_if_DELETE_fails(self):
self.basedir = "cli/Mv/mv_error_if_DELETE_fails"
self.set_up_grid()
fn1 = os.path.join(self.basedir, "file1")
DATA1 = "Nuclear launch codes"
fileutil.write(fn1, DATA1)
original_do_http = tahoe_mv.do_http
def mock_do_http(method, url, body=""):
if method == "DELETE":
class FakeResponse:
def read(self):
return "response"
resp = FakeResponse()
resp.status = '500 Something Went Wrong'
resp.reason = '*shrug*'
return resp
else:
return original_do_http(method, url, body=body)
tahoe_mv.do_http = mock_do_http
# copy file to the grid
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res:
self.do_cli("cp", fn1, "tahoe:"))
# do mv file1 file2
d.addCallback(lambda res:
self.do_cli("mv", "tahoe:file1", "tahoe:file2"))
def _check( (rc, out, err) ):
self.failIfIn("OK", out, "mv printed 'OK' even though the DELETE failed")
self.failUnlessEqual(rc, 2)
d.addCallback(_check)
def _restore_do_http(res):
tahoe_mv.do_http = original_do_http
return res
d.addBoth(_restore_do_http)
return d
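The save/patch/restore dance around tahoe_mv.do_http can also be written with trial's TestCase.patch(), which reverts the monkeypatch during test cleanup (cleanup runs only after the Deferred returned by the test has fired); a sketch of the same scenario written that way:

def test_mv_error_if_DELETE_fails_alt(self):
    # sketch only: same scenario as above, but self.patch() makes the
    # explicit addBoth(_restore_do_http) unnecessary
    self.basedir = "cli/Mv/mv_error_if_DELETE_fails_alt"
    self.set_up_grid()
    fn1 = os.path.join(self.basedir, "file1")
    fileutil.write(fn1, "Nuclear launch codes")
    original_do_http = tahoe_mv.do_http
    def mock_do_http(method, url, body=""):
        if method == "DELETE":
            class FakeResponse:
                def read(self):
                    return "response"
            resp = FakeResponse()
            resp.status = '500 Something Went Wrong'
            resp.reason = '*shrug*'
            return resp
        return original_do_http(method, url, body=body)
    self.patch(tahoe_mv, "do_http", mock_do_http)
    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
    d.addCallback(lambda res: self.do_cli("mv", "tahoe:file1", "tahoe:file2"))
    def _check((rc, out, err)):
        self.failIfIn("OK", out, "mv printed 'OK' even though the DELETE failed")
        self.failUnlessEqual(rc, 2)
    d.addCallback(_check)
    return d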
def test_mv_without_alias(self):
# doing 'tahoe mv' without explicitly specifying an alias or
# creating the default 'tahoe' alias should fail with a useful
# error message.
self.basedir = "cli/Mv/mv_without_alias"
self.set_up_grid()
d = self.do_cli("mv", "afile", "anotherfile")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
# check to see that the validation extends to the
# target argument by making an alias that will work with the first
# one.
d.addCallback(lambda ign: self.do_cli("create-alias", "havasu"))
def _create_a_test_file(ign):
self.test_file_path = os.path.join(self.basedir, "afile")
fileutil.write(self.test_file_path, "puppies" * 100)
d.addCallback(_create_a_test_file)
d.addCallback(lambda ign: self.do_cli("put", self.test_file_path,
"havasu:afile"))
d.addCallback(lambda ign: self.do_cli("mv", "havasu:afile",
"anotherfile"))
d.addCallback(_check)
return d
def test_mv_with_nonexistent_alias(self):
# doing 'tahoe mv' with an alias that doesn't exist should fail
# with an informative error message.
self.basedir = "cli/Mv/mv_with_nonexistent_alias"
self.set_up_grid()
d = self.do_cli("mv", "fake:afile", "fake:anotherfile")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessIn("fake", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
# check to see that the validation extends to the
# target argument by making an alias that will work with the first
# one.
d.addCallback(lambda ign: self.do_cli("create-alias", "havasu"))
def _create_a_test_file(ign):
self.test_file_path = os.path.join(self.basedir, "afile")
fileutil.write(self.test_file_path, "puppies" * 100)
d.addCallback(_create_a_test_file)
d.addCallback(lambda ign: self.do_cli("put", self.test_file_path,
"havasu:afile"))
d.addCallback(lambda ign: self.do_cli("mv", "havasu:afile",
"fake:anotherfile"))
d.addCallback(_check)
return d


@@ -0,0 +1,457 @@
import os.path
from twisted.trial import unittest
from twisted.python import usage
from allmydata.util import fileutil
from allmydata.scripts.common import get_aliases
from allmydata.scripts import cli
from allmydata.test.no_network import GridTestMixin
from allmydata.util.encodingutil import get_io_encoding, unicode_to_argv
from allmydata.util.fileutil import abspath_expanduser_unicode
from .test_cli import CLITestMixin
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
def test_unlinked_immutable_stdin(self):
# tahoe get `echo DATA | tahoe put`
# tahoe get `echo DATA | tahoe put -`
self.basedir = "cli/Put/unlinked_immutable_stdin"
DATA = "data" * 100
self.set_up_grid()
d = self.do_cli("put", stdin=DATA)
def _uploaded(res):
(rc, out, err) = res
self.failUnlessIn("waiting for file data on stdin..", err)
self.failUnlessIn("200 OK", err)
self.readcap = out
self.failUnless(self.readcap.startswith("URI:CHK:"))
d.addCallback(_uploaded)
d.addCallback(lambda res: self.do_cli("get", self.readcap))
def _downloaded(res):
(rc, out, err) = res
self.failUnlessReallyEqual(err, "")
self.failUnlessReallyEqual(out, DATA)
d.addCallback(_downloaded)
d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
d.addCallback(lambda (rc, out, err):
self.failUnlessReallyEqual(out, self.readcap))
return d
def test_unlinked_immutable_from_file(self):
# tahoe put file.txt
# tahoe put ./file.txt
# tahoe put /tmp/file.txt
# tahoe put ~/file.txt
self.basedir = "cli/Put/unlinked_immutable_from_file"
self.set_up_grid()
rel_fn = os.path.join(self.basedir, "DATAFILE")
abs_fn = unicode_to_argv(abspath_expanduser_unicode(unicode(rel_fn)))
# we make the file small enough to fit in a LIT file, for speed
fileutil.write(rel_fn, "short file")
d = self.do_cli("put", rel_fn)
def _uploaded((rc, out, err)):
readcap = out
self.failUnless(readcap.startswith("URI:LIT:"), readcap)
self.readcap = readcap
d.addCallback(_uploaded)
d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, self.readcap))
d.addCallback(lambda res: self.do_cli("put", abs_fn))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, self.readcap))
# we just have to assume that ~ is handled properly
return d
def test_immutable_from_file(self):
# tahoe put file.txt uploaded.txt
# echo DATA | tahoe put - uploaded.txt
# tahoe put file.txt subdir/uploaded.txt
# tahoe put file.txt tahoe:uploaded.txt
# tahoe put file.txt tahoe:subdir/uploaded.txt
# tahoe put file.txt DIRCAP:./uploaded.txt
# tahoe put file.txt DIRCAP:./subdir/uploaded.txt
self.basedir = "cli/Put/immutable_from_file"
self.set_up_grid()
rel_fn = os.path.join(self.basedir, "DATAFILE")
# we make the file small enough to fit in a LIT file, for speed
DATA = "short file"
DATA2 = "short file two"
fileutil.write(rel_fn, DATA)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res:
self.do_cli("put", rel_fn, "uploaded.txt"))
def _uploaded((rc, out, err)):
readcap = out.strip()
self.failUnless(readcap.startswith("URI:LIT:"), readcap)
self.failUnlessIn("201 Created", err)
self.readcap = readcap
d.addCallback(_uploaded)
d.addCallback(lambda res:
self.do_cli("get", "tahoe:uploaded.txt"))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, DATA))
d.addCallback(lambda res:
self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
def _replaced((rc, out, err)):
readcap = out.strip()
self.failUnless(readcap.startswith("URI:LIT:"), readcap)
self.failUnlessIn("200 OK", err)
d.addCallback(_replaced)
d.addCallback(lambda res:
self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, DATA))
d.addCallback(lambda res:
self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, DATA))
d.addCallback(lambda res:
self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
d.addCallback(lambda res:
self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, DATA))
def _get_dircap(res):
self.dircap = get_aliases(self.get_clientdir())["tahoe"]
d.addCallback(_get_dircap)
d.addCallback(lambda res:
self.do_cli("put", rel_fn,
self.dircap+":./uploaded5.txt"))
d.addCallback(lambda res:
self.do_cli("get", "tahoe:uploaded5.txt"))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, DATA))
d.addCallback(lambda res:
self.do_cli("put", rel_fn,
self.dircap+":./subdir/uploaded6.txt"))
d.addCallback(lambda res:
self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
d.addCallback(lambda (rc,stdout,stderr):
self.failUnlessReallyEqual(stdout, DATA))
return d
def test_mutable_unlinked(self):
# FILECAP = `echo DATA | tahoe put --mutable`
# tahoe get FILECAP, compare against DATA
# echo DATA2 | tahoe put - FILECAP
# tahoe get FILECAP, compare against DATA2
# tahoe put file.txt FILECAP
self.basedir = "cli/Put/mutable_unlinked"
self.set_up_grid()
DATA = "data" * 100
DATA2 = "two" * 100
rel_fn = os.path.join(self.basedir, "DATAFILE")
DATA3 = "three" * 100
fileutil.write(rel_fn, DATA3)
d = self.do_cli("put", "--mutable", stdin=DATA)
def _created(res):
(rc, out, err) = res
self.failUnlessIn("waiting for file data on stdin..", err)
self.failUnlessIn("200 OK", err)
self.filecap = out
self.failUnless(self.filecap.startswith("URI:SSK:"), self.filecap)
d.addCallback(_created)
d.addCallback(lambda res: self.do_cli("get", self.filecap))
d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA))
d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
def _replaced(res):
(rc, out, err) = res
self.failUnlessIn("waiting for file data on stdin..", err)
self.failUnlessIn("200 OK", err)
self.failUnlessReallyEqual(self.filecap, out)
d.addCallback(_replaced)
d.addCallback(lambda res: self.do_cli("get", self.filecap))
d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA2))
d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
def _replaced2(res):
(rc, out, err) = res
self.failUnlessIn("200 OK", err)
self.failUnlessReallyEqual(self.filecap, out)
d.addCallback(_replaced2)
d.addCallback(lambda res: self.do_cli("get", self.filecap))
d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA3))
return d
def test_mutable(self):
# echo DATA1 | tahoe put --mutable - uploaded.txt
# echo DATA2 | tahoe put - uploaded.txt # should modify-in-place
# tahoe get uploaded.txt, compare against DATA2
self.basedir = "cli/Put/mutable"
self.set_up_grid()
DATA1 = "data" * 100
fn1 = os.path.join(self.basedir, "DATA1")
fileutil.write(fn1, DATA1)
DATA2 = "two" * 100
fn2 = os.path.join(self.basedir, "DATA2")
fileutil.write(fn2, DATA2)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res:
self.do_cli("put", "--mutable", fn1, "tahoe:uploaded.txt"))
def _check(res):
(rc, out, err) = res
self.failUnlessEqual(rc, 0, str(res))
self.failUnlessEqual(err.strip(), "201 Created", str(res))
self.uri = out
d.addCallback(_check)
d.addCallback(lambda res:
self.do_cli("put", fn2, "tahoe:uploaded.txt"))
def _check2(res):
(rc, out, err) = res
self.failUnlessEqual(rc, 0, str(res))
self.failUnlessEqual(err.strip(), "200 OK", str(res))
self.failUnlessEqual(out, self.uri, str(res))
d.addCallback(_check2)
d.addCallback(lambda res:
self.do_cli("get", "tahoe:uploaded.txt"))
d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA2))
return d
def _check_mdmf_json(self, (rc, json, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(err, "")
self.failUnlessIn('"format": "MDMF"', json)
# We also want a valid MDMF cap to be in the json.
self.failUnlessIn("URI:MDMF", json)
self.failUnlessIn("URI:MDMF-RO", json)
self.failUnlessIn("URI:MDMF-Verifier", json)
def _check_sdmf_json(self, (rc, json, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(err, "")
self.failUnlessIn('"format": "SDMF"', json)
# We also want to see the appropriate SDMF caps.
self.failUnlessIn("URI:SSK", json)
self.failUnlessIn("URI:SSK-RO", json)
self.failUnlessIn("URI:SSK-Verifier", json)
def _check_chk_json(self, (rc, json, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(err, "")
self.failUnlessIn('"format": "CHK"', json)
# We also want to see the appropriate CHK caps.
self.failUnlessIn("URI:CHK", json)
self.failUnlessIn("URI:CHK-Verifier", json)
def test_format(self):
self.basedir = "cli/Put/format"
self.set_up_grid()
data = "data" * 40000 # 160kB total, two segments
fn1 = os.path.join(self.basedir, "data")
fileutil.write(fn1, data)
d = self.do_cli("create-alias", "tahoe")
def _put_and_ls(ign, cmdargs, expected, filename=None):
if filename:
args = ["put"] + cmdargs + [fn1, filename]
else:
# unlinked
args = ["put"] + cmdargs + [fn1]
d2 = self.do_cli(*args)
def _list((rc, out, err)):
self.failUnlessEqual(rc, 0) # don't allow failure
if filename:
return self.do_cli("ls", "--json", filename)
else:
cap = out.strip()
return self.do_cli("ls", "--json", cap)
d2.addCallback(_list)
return d2
# 'tahoe put' to a directory
d.addCallback(_put_and_ls, ["--mutable"], "SDMF", "tahoe:s1.txt")
d.addCallback(self._check_sdmf_json) # backwards-compatibility
d.addCallback(_put_and_ls, ["--format=SDMF"], "SDMF", "tahoe:s2.txt")
d.addCallback(self._check_sdmf_json)
d.addCallback(_put_and_ls, ["--format=sdmf"], "SDMF", "tahoe:s3.txt")
d.addCallback(self._check_sdmf_json)
d.addCallback(_put_and_ls, ["--mutable", "--format=SDMF"], "SDMF", "tahoe:s4.txt")
d.addCallback(self._check_sdmf_json)
d.addCallback(_put_and_ls, ["--format=MDMF"], "MDMF", "tahoe:m1.txt")
d.addCallback(self._check_mdmf_json)
d.addCallback(_put_and_ls, ["--mutable", "--format=MDMF"], "MDMF", "tahoe:m2.txt")
d.addCallback(self._check_mdmf_json)
d.addCallback(_put_and_ls, ["--format=CHK"], "CHK", "tahoe:c1.txt")
d.addCallback(self._check_chk_json)
d.addCallback(_put_and_ls, [], "CHK", "tahoe:c1.txt")
d.addCallback(self._check_chk_json)
# 'tahoe put' unlinked
d.addCallback(_put_and_ls, ["--mutable"], "SDMF")
d.addCallback(self._check_sdmf_json) # backwards-compatibility
d.addCallback(_put_and_ls, ["--format=SDMF"], "SDMF")
d.addCallback(self._check_sdmf_json)
d.addCallback(_put_and_ls, ["--format=sdmf"], "SDMF")
d.addCallback(self._check_sdmf_json)
d.addCallback(_put_and_ls, ["--mutable", "--format=SDMF"], "SDMF")
d.addCallback(self._check_sdmf_json)
d.addCallback(_put_and_ls, ["--format=MDMF"], "MDMF")
d.addCallback(self._check_mdmf_json)
d.addCallback(_put_and_ls, ["--mutable", "--format=MDMF"], "MDMF")
d.addCallback(self._check_mdmf_json)
d.addCallback(_put_and_ls, ["--format=CHK"], "CHK")
d.addCallback(self._check_chk_json)
d.addCallback(_put_and_ls, [], "CHK")
d.addCallback(self._check_chk_json)
return d
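The flag-to-format pairs exercised in test_format could arguably be driven from a table; a fragment-level sketch meant to sit inside test_format after _put_and_ls is defined, reusing the existing checker methods (same coverage, just data-driven):

cases = [
    (["--mutable"],                  self._check_sdmf_json),
    (["--format=SDMF"],              self._check_sdmf_json),
    (["--format=sdmf"],              self._check_sdmf_json),
    (["--mutable", "--format=SDMF"], self._check_sdmf_json),
    (["--format=MDMF"],              self._check_mdmf_json),
    (["--mutable", "--format=MDMF"], self._check_mdmf_json),
    (["--format=CHK"],               self._check_chk_json),
    ([],                             self._check_chk_json),
]
# unlinked put for each case; the linked variants would loop over the
# same table with a "tahoe:NAME" filename argument added
for cmdargs, checker in cases:
    d.addCallback(_put_and_ls, cmdargs, None)
    d.addCallback(checker)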
def test_put_to_mdmf_cap(self):
self.basedir = "cli/Put/put_to_mdmf_cap"
self.set_up_grid()
data = "data" * 100000
fn1 = os.path.join(self.basedir, "data")
fileutil.write(fn1, data)
d = self.do_cli("put", "--format=MDMF", fn1)
def _got_cap((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.cap = out.strip()
d.addCallback(_got_cap)
# Now try to write something to the cap using put.
data2 = "data2" * 100000
fn2 = os.path.join(self.basedir, "data2")
fileutil.write(fn2, data2)
d.addCallback(lambda ignored:
self.do_cli("put", fn2, self.cap))
def _got_put((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessIn(self.cap, out)
d.addCallback(_got_put)
# Now get the cap. We should see the data we just put there.
d.addCallback(lambda ignored:
self.do_cli("get", self.cap))
def _got_data((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(out, data2)
d.addCallback(_got_data)
# add some extension information to the cap and try to put something
# to it.
def _make_extended_cap(ignored):
self.cap = self.cap + ":Extension-Stuff"
d.addCallback(_make_extended_cap)
data3 = "data3" * 100000
fn3 = os.path.join(self.basedir, "data3")
fileutil.write(fn3, data3)
d.addCallback(lambda ignored:
self.do_cli("put", fn3, self.cap))
d.addCallback(lambda ignored:
self.do_cli("get", self.cap))
def _got_data3((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(out, data3)
d.addCallback(_got_data3)
return d
def test_put_to_sdmf_cap(self):
self.basedir = "cli/Put/put_to_sdmf_cap"
self.set_up_grid()
data = "data" * 100000
fn1 = os.path.join(self.basedir, "data")
fileutil.write(fn1, data)
d = self.do_cli("put", "--format=SDMF", fn1)
def _got_cap((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.cap = out.strip()
d.addCallback(_got_cap)
# Now try to write something to the cap using put.
data2 = "data2" * 100000
fn2 = os.path.join(self.basedir, "data2")
fileutil.write(fn2, data2)
d.addCallback(lambda ignored:
self.do_cli("put", fn2, self.cap))
def _got_put((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessIn(self.cap, out)
d.addCallback(_got_put)
# Now get the cap. We should see the data we just put there.
d.addCallback(lambda ignored:
self.do_cli("get", self.cap))
def _got_data((rc, out, err)):
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(out, data2)
d.addCallback(_got_data)
return d
def test_mutable_type_invalid_format(self):
o = cli.PutOptions()
self.failUnlessRaises(usage.UsageError,
o.parseOptions,
["--format=LDMF"])
def test_put_with_nonexistent_alias(self):
# when invoked with an alias that doesn't exist, 'tahoe put'
# should output a useful error message, not a stack trace
self.basedir = "cli/Put/put_with_nonexistent_alias"
self.set_up_grid()
d = self.do_cli("put", "somefile", "fake:afile")
def _check((rc, out, err)):
self.failUnlessReallyEqual(rc, 1)
self.failUnlessIn("error:", err)
self.failUnlessReallyEqual(out, "")
d.addCallback(_check)
return d
def test_immutable_from_file_unicode(self):
# tahoe put "\u00E0 trier.txt" "\u00E0 trier.txt"
try:
a_trier_arg = u"\u00E0 trier.txt".encode(get_io_encoding())
except UnicodeEncodeError:
raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
self.skip_if_cannot_represent_filename(u"\u00E0 trier.txt")
self.basedir = "cli/Put/immutable_from_file_unicode"
self.set_up_grid()
rel_fn = os.path.join(unicode(self.basedir), u"\u00E0 trier.txt")
# we make the file small enough to fit in a LIT file, for speed
DATA = "short file"
fileutil.write(rel_fn, DATA)
d = self.do_cli("create-alias", "tahoe")
d.addCallback(lambda res:
self.do_cli("put", rel_fn.encode(get_io_encoding()), a_trier_arg))
def _uploaded((rc, out, err)):
readcap = out.strip()
self.failUnless(readcap.startswith("URI:LIT:"), readcap)
self.failUnlessIn("201 Created", err)
self.readcap = readcap
d.addCallback(_uploaded)
d.addCallback(lambda res:
self.do_cli("get", "tahoe:" + a_trier_arg))
d.addCallback(lambda (rc, out, err):
self.failUnlessReallyEqual(out, DATA))
return d