# coding=utf-8

import os.path
from twisted.trial import unittest
from cStringIO import StringIO
import urllib

from allmydata.util import fileutil, hashutil
from allmydata import uri

# Test that the scripts can be imported -- although the actual tests of their
# functionality are done by invoking them in a subprocess.
from allmydata.scripts import tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp
_hush_pyflakes = [tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp]

from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases

from allmydata.scripts import cli, debug, runner
from allmydata.test.common import SystemTestMixin
from twisted.internet import threads # CLI tests use deferToThread


class CLI(unittest.TestCase):
    # this test case only looks at argument-processing and simple stuff.

    def test_options(self):
        fileutil.rm_dir("cli/test_options")
        fileutil.make_dirs("cli/test_options")
        fileutil.make_dirs("cli/test_options/private")
        open("cli/test_options/node.url","w").write("http://localhost:8080/\n")
        filenode_uri = uri.WriteableSSKFileURI(writekey="\x00"*16,
                                               fingerprint="\x00"*32)
        private_uri = uri.NewDirectoryURI(filenode_uri).to_string()
        open("cli/test_options/private/root_dir.cap", "w").write(private_uri + "\n")

        o = cli.ListOptions()
        o.parseOptions(["--node-directory", "cli/test_options"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")

        o = cli.ListOptions()
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--node-url", "http://example.org:8111/"])
        self.failUnlessEqual(o['node-url'], "http://example.org:8111/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")

        o = cli.ListOptions()
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", "root"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root")
        self.failUnlessEqual(o.where, "")

        o = cli.ListOptions()
        other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16,
                                                     fingerprint="\x11"*32)
        other_uri = uri.NewDirectoryURI(other_filenode_uri).to_string()
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "")

        o = cli.ListOptions()
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri, "subdir"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "subdir")

    def _dump_cap(self, *args):
        config = debug.DumpCapOptions()
        config.stdout,config.stderr = StringIO(), StringIO()
        config.parseOptions(args)
        debug.dump_cap(config)
        self.failIf(config.stderr.getvalue())
        output = config.stdout.getvalue()
        return output

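    # The tests below drive "dump-cap" in-process via debug.DumpCapOptions /
    # debug.dump_cap. From a shell the rough equivalent (a sketch, not exact
    # syntax or output) would be something like:
    #
    #   tahoe debug dump-cap URI:CHK:...
    #   tahoe debug dump-cap --client-secret SECRET URI:SSK:...
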
    def test_dump_cap_chk(self):
        key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
        storage_index = hashutil.storage_index_hash(key)
        uri_extension_hash = hashutil.uri_extension_hash("stuff")
        needed_shares = 25
        total_shares = 100
        size = 1234
        u = uri.CHKFileURI(key=key,
                           uri_extension_hash=uri_extension_hash,
                           needed_shares=needed_shares,
                           total_shares=total_shares,
                           size=size)
        output = self._dump_cap(u.to_string())
        self.failUnless("CHK File:" in output, output)
        self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)

        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
                                u.to_string())
        self.failUnless("client renewal secret: znxmki5zdibb5qlt46xbdvk2t55j7hibejq3i5ijyurkr6m6jkhq" in output, output)

        output = self._dump_cap(u.get_verifier().to_string())
        self.failIf("key: " in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)

        prefixed_u = "http://127.0.0.1/uri/%s" % urllib.quote(u.to_string())
        output = self._dump_cap(prefixed_u)
        self.failUnless("CHK File:" in output, output)
        self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)

    def test_dump_cap_lit(self):
        u = uri.LiteralFileURI("this is some data")
        output = self._dump_cap(u.to_string())
        self.failUnless("Literal File URI:" in output, output)
        self.failUnless("data: this is some data" in output, output)

    def test_dump_cap_ssk(self):
        writekey = "\x01" * 16
        fingerprint = "\xfe" * 32
        u = uri.WriteableSSKFileURI(writekey, fingerprint)

        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Writeable URI:" in output, output)
        self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
                                u.to_string())
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)

        fileutil.make_dirs("cli/test_dump_cap/private")
        f = open("cli/test_dump_cap/private/secret", "w")
        f.write("5s33nk3qpvnj2fw3z4mnm2y6fa\n")
        f.close()
        output = self._dump_cap("--client-dir", "cli/test_dump_cap",
                                u.to_string())
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)

        output = self._dump_cap("--client-dir", "cli/test_dump_cap_BOGUS",
                                u.to_string())
        self.failIf("file renewal secret:" in output, output)

        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                u.to_string())
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failIf("file renewal secret:" in output, output)

        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
                                u.to_string())
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)

        u = u.get_readonly()
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Read-only URI:" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

        u = u.get_verifier()
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Verifier URI:" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

    def test_dump_cap_directory(self):
        writekey = "\x01" * 16
        fingerprint = "\xfe" * 32
        u1 = uri.WriteableSSKFileURI(writekey, fingerprint)
        u = uri.NewDirectoryURI(u1)

        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Writeable URI:" in output, output)
        self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output,
                        output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output,
                        output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
                                u.to_string())
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)

        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                u.to_string())
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failIf("file renewal secret:" in output, output)

        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
                                u.to_string())
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)

        u = u.get_readonly()
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Read-only URI:" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

        u = u.get_verifier()
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Verifier URI:" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

    def _catalog_shares(self, *basedirs):
        o = debug.CatalogSharesOptions()
        o.stdout,o.stderr = StringIO(), StringIO()
        args = list(basedirs)
        o.parseOptions(args)
        debug.catalog_shares(o)
        out = o.stdout.getvalue()
        err = o.stderr.getvalue()
        return out, err

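    # _catalog_shares() drives "catalog-shares" in-process the same way; from
    # a shell this corresponds roughly to `tahoe debug catalog-shares NODEDIR..`,
    # which is expected to describe the shares found under each node directory
    # on stdout and report per-share problems on stderr.
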
    def test_catalog_shares_error(self):
        nodedir1 = "cli/test_catalog_shares/node1"
        sharedir = os.path.join(nodedir1, "storage", "shares", "mq", "mqfblse6m5a6dh45isu2cg7oji")
        fileutil.make_dirs(sharedir)
        f = open(os.path.join(sharedir, "8"), "wb")
        open("cli/test_catalog_shares/node1/storage/shares/mq/not-a-dir", "wb").close()
        # write a bogus share that looks a little bit like CHK
        f.write("\x00\x00\x00\x01" + "\xff" * 200) # this triggers an assert
        f.close()

        nodedir2 = "cli/test_catalog_shares/node2"
        fileutil.make_dirs(nodedir2)
        open("cli/test_catalog_shares/node1/storage/shares/not-a-dir", "wb").close()

        # now make sure that the 'catalog-shares' command survives the error
        out, err = self._catalog_shares(nodedir1, nodedir2)
        self.failUnlessEqual(out, "", out)
        self.failUnless("Error processing " in err,
                        "didn't see 'error processing' in '%s'" % err)
        #self.failUnless(nodedir1 in err,
        #                "didn't see '%s' in '%s'" % (nodedir1, err))
        # windows mangles the path, and os.path.join isn't enough to make
        # up for it, so just look for individual strings
        self.failUnless("node1" in err,
                        "didn't see 'node1' in '%s'" % err)
        self.failUnless("mqfblse6m5a6dh45isu2cg7oji" in err,
                        "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err)


class CLITestMixin:
    # do_cli() runs the real "tahoe" CLI dispatcher in-process (in a worker
    # thread, since runner.runner blocks) and fires its Deferred with the
    # tuple (rc, stdout, stderr).
    def do_cli(self, verb, *args, **kwargs):
        nodeargs = [
            "--node-directory", self.getdir("client0"),
            ]
        argv = [verb] + nodeargs + list(args)
        stdin = kwargs.get("stdin", "")
        stdout, stderr = StringIO(), StringIO()
        d = threads.deferToThread(runner.runner, argv, run_by_human=False,
                                  stdin=StringIO(stdin),
                                  stdout=stdout, stderr=stderr)
        def _done(rc):
            return rc, stdout.getvalue(), stderr.getvalue()
        d.addCallback(_done)
        return d

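# A typical use of CLITestMixin.do_cli in the system tests below (a sketch of
# the pattern, not an additional test):
#
#   d = self.do_cli("put", "-", "uploaded.txt", stdin=DATA2)
#   d.addCallback(lambda (rc,out,err): self.failUnlessEqual(rc, 0))
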
class CreateAlias(SystemTestMixin, CLITestMixin, unittest.TestCase):
    def _test_webopen(self, args, expected_url):
        woo = cli.WebopenOptions()
        all_args = ["--node-directory", self.getdir("client0")] + list(args)
        woo.parseOptions(all_args)
        urls = []
        rc = cli.webopen(woo, urls.append)
        self.failUnlessEqual(rc, 0)
        self.failUnlessEqual(len(urls), 1)
        self.failUnlessEqual(urls[0], expected_url)

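    # The expected_url values checked by _test_webopen are assembled in
    # _stash_urls (inside test_create below): the contents of the client's
    # node.url file plus "uri/" plus the urllib-quoted directory cap,
    # optionally followed by a subdirectory/file path.
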
    def test_create(self):
        self.basedir = os.path.dirname(self.mktemp())
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        def _done((rc,stdout,stderr)):
            self.failUnless("Alias 'tahoe' created" in stdout)
            self.failIf(stderr)
            aliases = get_aliases(self.getdir("client0"))
            self.failUnless("tahoe" in aliases)
            self.failUnless(aliases["tahoe"].startswith("URI:DIR2:"))
        d.addCallback(_done)
        d.addCallback(lambda res: self.do_cli("create-alias", "two"))

        def _stash_urls(res):
            aliases = get_aliases(self.getdir("client0"))
            node_url_file = os.path.join(self.getdir("client0"), "node.url")
            nodeurl = open(node_url_file, "r").read().strip()
            uribase = nodeurl + "uri/"
            self.tahoe_url = uribase + urllib.quote(aliases["tahoe"])
            self.tahoe_subdir_url = self.tahoe_url + "/subdir"
            self.two_url = uribase + urllib.quote(aliases["two"])
            self.two_uri = aliases["two"]
        d.addCallback(_stash_urls)

        d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
        def _check_create_duplicate((rc,stdout,stderr)):
            self.failIfEqual(rc, 0)
            self.failUnless("Alias 'two' already exists!" in stderr)
            aliases = get_aliases(self.getdir("client0"))
            self.failUnlessEqual(aliases["two"], self.two_uri)
        d.addCallback(_check_create_duplicate)

        d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
        def _check_add((rc,stdout,stderr)):
            self.failUnlessEqual(rc, 0)
            self.failUnless("Alias 'added' added" in stdout)
        d.addCallback(_check_add)

        # check add-alias with a duplicate
        d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
        def _check_add_duplicate((rc,stdout,stderr)):
            self.failIfEqual(rc, 0)
            self.failUnless("Alias 'two' already exists!" in stderr)
            aliases = get_aliases(self.getdir("client0"))
            self.failUnlessEqual(aliases["two"], self.two_uri)
        d.addCallback(_check_add_duplicate)

        def _test_urls(junk):
            self._test_webopen([], self.tahoe_url)
            self._test_webopen(["/"], self.tahoe_url)
            self._test_webopen(["tahoe:"], self.tahoe_url)
            self._test_webopen(["tahoe:/"], self.tahoe_url)
            self._test_webopen(["tahoe:subdir"], self.tahoe_subdir_url)
            self._test_webopen(["tahoe:subdir/"], self.tahoe_subdir_url + '/')
            self._test_webopen(["tahoe:subdir/file"], self.tahoe_subdir_url + '/file')
            # if "file" is indeed a file, then the url produced by webopen in
            # this case is disallowed by the webui. but by design, webopen
            # passes through the mistake from the user to the resultant
            # webopened url
            self._test_webopen(["tahoe:subdir/file/"], self.tahoe_subdir_url + '/file/')
            self._test_webopen(["two:"], self.two_url)
        d.addCallback(_test_urls)

        return d


class Put(SystemTestMixin, CLITestMixin, unittest.TestCase):
    def test_unlinked_immutable_stdin(self):
        # tahoe get `echo DATA | tahoe put`
        # tahoe get `echo DATA | tahoe put -`
        self.basedir = self.mktemp()
        DATA = "data" * 100
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("put", stdin=DATA))
        def _uploaded(res):
            (rc, stdout, stderr) = res
            self.failUnless("waiting for file data on stdin.." in stderr)
            self.failUnless("200 OK" in stderr, stderr)
            self.readcap = stdout
            self.failUnless(self.readcap.startswith("URI:CHK:"))
        d.addCallback(_uploaded)
        d.addCallback(lambda res: self.do_cli("get", self.readcap))
        def _downloaded(res):
            (rc, stdout, stderr) = res
            self.failUnlessEqual(stderr, "")
            self.failUnlessEqual(stdout, DATA)
        d.addCallback(_downloaded)
        d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
        return d

    def test_unlinked_immutable_from_file(self):
        # tahoe put file.txt
        # tahoe put ./file.txt
        # tahoe put /tmp/file.txt
        # tahoe put ~/file.txt
        self.basedir = os.path.dirname(self.mktemp())
        # this will be "allmydata.test.test_cli/Put/test_put_from_file/RANDOM"
        # and the RANDOM directory will exist. Raw mktemp returns a filename.

        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        # we make the file small enough to fit in a LIT file, for speed
        f = open(rel_fn, "w")
        f.write("short file")
        f.close()
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("put", rel_fn))
        def _uploaded((rc,stdout,stderr)):
            readcap = stdout
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.readcap = readcap
        d.addCallback(_uploaded)
        d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
        d.addCallback(lambda res: self.do_cli("put", abs_fn))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
        # we just have to assume that ~ is handled properly
        return d

    def test_immutable_from_file(self):
        # tahoe put file.txt uploaded.txt
        # tahoe put - uploaded.txt
        # tahoe put file.txt subdir/uploaded.txt
        # tahoe put file.txt tahoe:uploaded.txt
        # tahoe put file.txt tahoe:subdir/uploaded.txt
        # tahoe put file.txt DIRCAP:./uploaded.txt
        # tahoe put file.txt DIRCAP:./subdir/uploaded.txt
        self.basedir = os.path.dirname(self.mktemp())

        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        # we make the file small enough to fit in a LIT file, for speed
        DATA = "short file"
        DATA2 = "short file two"
        f = open(rel_fn, "w")
        f.write(DATA)
        f.close()

        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "uploaded.txt"))
        def _uploaded((rc,stdout,stderr)):
            readcap = stdout.strip()
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.failUnless("201 Created" in stderr, stderr)
            self.readcap = readcap
        d.addCallback(_uploaded)
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        d.addCallback(lambda res:
                      self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
        def _replaced((rc,stdout,stderr)):
            readcap = stdout.strip()
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.failUnless("200 OK" in stderr, stderr)
        d.addCallback(_replaced)

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
        d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        def _get_dircap(res):
            self.dircap = get_aliases(self.getdir("client0"))["tahoe"]
        d.addCallback(_get_dircap)

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn,
                                  self.dircap+":./uploaded5.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded5.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn,
                                  self.dircap+":./subdir/uploaded6.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        return d

    def test_mutable_unlinked(self):
        # FILECAP = `echo DATA | tahoe put --mutable`
        # tahoe get FILECAP, compare against DATA
        # echo DATA2 | tahoe put - FILECAP
        # tahoe get FILECAP, compare against DATA2
        # tahoe put file.txt FILECAP
        self.basedir = os.path.dirname(self.mktemp())
        DATA = "data" * 100
        DATA2 = "two" * 100
        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        DATA3 = "three" * 100
        f = open(rel_fn, "w")
        f.write(DATA3)
        f.close()

        d = self.set_up_nodes()

        d.addCallback(lambda res: self.do_cli("put", "--mutable", stdin=DATA))
        def _created(res):
            (rc, stdout, stderr) = res
            self.failUnless("waiting for file data on stdin.." in stderr)
            self.failUnless("200 OK" in stderr)
            self.filecap = stdout
            self.failUnless(self.filecap.startswith("URI:SSK:"))
        d.addCallback(_created)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA))

        d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
        def _replaced(res):
            (rc, stdout, stderr) = res
            self.failUnless("waiting for file data on stdin.." in stderr)
            self.failUnless("200 OK" in stderr)
            self.failUnlessEqual(self.filecap, stdout)
        d.addCallback(_replaced)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))

        d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
        def _replaced2(res):
            (rc, stdout, stderr) = res
            self.failUnless("200 OK" in stderr)
            self.failUnlessEqual(self.filecap, stdout)
        d.addCallback(_replaced2)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA3))

        return d

    def test_mutable(self):
        # echo DATA1 | tahoe put --mutable - uploaded.txt
        # echo DATA2 | tahoe put - uploaded.txt # should modify-in-place
        # tahoe get uploaded.txt, compare against DATA2

        self.basedir = os.path.dirname(self.mktemp())
        DATA1 = "data" * 100
        fn1 = os.path.join(self.basedir, "DATA1")
        f = open(fn1, "w")
        f.write(DATA1)
        f.close()
        DATA2 = "two" * 100
        fn2 = os.path.join(self.basedir, "DATA2")
        f = open(fn2, "w")
        f.write(DATA2)
        f.close()

        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        d.addCallback(lambda res:
                      self.do_cli("put", "--mutable", fn1, "tahoe:uploaded.txt"))
        d.addCallback(lambda res:
                      self.do_cli("put", fn2, "tahoe:uploaded.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded.txt"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
        return d


class Cp(SystemTestMixin, CLITestMixin, unittest.TestCase):
    def test_unicode_filename(self):
        self.basedir = os.path.dirname(self.mktemp())

        fn1 = os.path.join(self.basedir, "Ärtonwall")
        DATA1 = "unicode file content"
        open(fn1, "wb").write(DATA1)

        fn2 = os.path.join(self.basedir, "Metallica")
        DATA2 = "non-unicode file content"
        open(fn2, "wb").write(DATA2)

        # Bug #534
        # Assure that uploading a file whose name contains unicode characters
        # doesn't prevent further uploads in the same directory
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
        d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))

        d.addCallback(lambda res: self.do_cli("get", "tahoe:Ärtonwall"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA1))

        d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))

        return d