Remove the greater part of the implementation

This commit is contained in:
Jean-Paul Calderone 2020-02-13 10:43:50 -05:00
parent 1fba2d6ea2
commit ef73e18f3d
No known key found for this signature in database
GPG Key ID: 86E6F8BAE797C287
26 changed files with 3 additions and 7583 deletions

View File

@ -85,9 +85,6 @@ _client_config = configutil.ValidConfiguration(
"stats_gatherer.furl",
"storage.plugins",
),
"drop_upload": ( # deprecated already?
"enabled",
),
"ftpd": (
"accounts.file",
"accounts.url",
@ -121,12 +118,6 @@ _client_config = configutil.ValidConfiguration(
"helper": (
"enabled",
),
"magic_folder": (
"download.umask",
"enabled",
"local.directory",
"poll_interval",
),
},
is_valid_section=_is_valid_section,
# Anything in a valid section is a valid item, for now.
@ -681,7 +672,6 @@ class _Client(node.Node, pollmixin.PollMixin):
"""
node.Node.__init__(self, config, main_tub, control_tub, i2p_provider, tor_provider)
self._magic_folders = dict()
self.started_timestamp = time.time()
self.logSource = "Client"
self.encoding_params = self.DEFAULT_ENCODING_PARAMETERS.copy()
@ -707,7 +697,6 @@ class _Client(node.Node, pollmixin.PollMixin):
self.init_helper()
self.init_ftp_server()
self.init_sftp_server()
self.init_magic_folder()
# If the node sees an exit_trigger file, it will poll every second to see
# whether the file still exists, and what its mtime is. If the file does not
@ -968,9 +957,6 @@ class _Client(node.Node, pollmixin.PollMixin):
This returns a local authentication token, which is just some
random data in "api_auth_token" which must be echoed to API
calls.
Currently only the URI '/magic' for magic-folder status; other
endpoints are invited to include this as well, as appropriate.
"""
return self.config.get_private_config('api_auth_token')
@ -1088,40 +1074,6 @@ class _Client(node.Node, pollmixin.PollMixin):
sftp_portstr, pubkey_file, privkey_file)
s.setServiceParent(self)
def init_magic_folder(self):
    """
    Start every enabled magic-folder as a child service of this client.

    Raises OldConfigOptionError if the deprecated [drop_upload] section is
    still present; otherwise loads all configured folders and arranges for
    each one to begin uploading once enough storage servers are connected.
    """
    if self.config.get_config("drop_upload", "enabled", False, boolean=True):
        # [drop_upload] was the old name for this feature; refuse to run
        # with the stale section rather than silently ignoring it.
        raise node.OldConfigOptionError(
            "The [drop_upload] section must be renamed to [magic_folder].\n"
            "See docs/frontends/magic-folder.rst for more information."
        )
    if self.config.get_config("magic_folder", "enabled", False, boolean=True):
        # Imported lazily so clients without magic-folder enabled never
        # pay for (or depend on) the frontend module.
        from allmydata.frontends import magic_folder
        try:
            magic_folders = magic_folder.load_magic_folders(self.config._basedir)
        except Exception as e:
            log.msg("Error loading magic-folder config: {}".format(e))
            raise
        # start processing the upload queue when we've connected to
        # enough servers
        threshold = min(self.encoding_params["k"],
                        self.encoding_params["happy"] + 1)
        for (name, mf_config) in magic_folders.items():
            self.log("Starting magic_folder '{}'".format(name))
            s = magic_folder.MagicFolder.from_config(self, name, mf_config)
            self._magic_folders[name] = s
            s.setServiceParent(self)
            connected_d = self.storage_broker.when_connected_enough(threshold)
            # Bind the folder as an explicit callback argument so each
            # iteration's service is captured (avoids late-binding bugs).
            def connected_enough(ign, mf):
                mf.ready()  # returns a Deferred we ignore
                return None
            connected_d.addCallback(connected_enough, s)
def _check_exit_trigger(self, exit_trigger_file):
if os.path.exists(exit_trigger_file):
mtime = os.stat(exit_trigger_file)[stat.ST_MTIME]

File diff suppressed because it is too large Load Diff

View File

@ -1,204 +0,0 @@
from __future__ import print_function
import sys
from collections import namedtuple
from allmydata.util.dbutil import get_db, DBError
from allmydata.util.eliotutil import (
RELPATH,
VERSION,
LAST_UPLOADED_URI,
LAST_DOWNLOADED_URI,
LAST_DOWNLOADED_TIMESTAMP,
PATHINFO,
validateSetMembership,
validateInstanceOf,
)
from eliot import (
Field,
ActionType,
)
# In-memory record of one row of the local_files table (see SCHEMA_v1 below).
PathEntry = namedtuple('PathEntry', 'size mtime_ns ctime_ns version last_uploaded_uri '
                       'last_downloaded_uri last_downloaded_timestamp')

# Eliot log field serializing an optional PathEntry to a plain dict.
PATHENTRY = Field(
    u"pathentry",
    lambda v: None if v is None else {
        "size": v.size,
        "mtime_ns": v.mtime_ns,
        "ctime_ns": v.ctime_ns,
        "version": v.version,
        "last_uploaded_uri": v.last_uploaded_uri,
        "last_downloaded_uri": v.last_downloaded_uri,
        "last_downloaded_timestamp": v.last_downloaded_timestamp,
    },
    u"The local database state of a file.",
    validateInstanceOf((type(None), PathEntry)),
)

# Eliot log field recording whether did_upload_version inserted a new row
# or updated an existing one.
_INSERT_OR_UPDATE = Field.for_types(
    u"insert_or_update",
    [unicode],
    u"An indication of whether the record for this upload was new or an update to a previous entry.",
    validateSetMembership({u"insert", u"update"}),
)

# Eliot action wrapping each did_upload_version database write.
UPDATE_ENTRY = ActionType(
    u"magic-folder-db:update-entry",
    [RELPATH, VERSION, LAST_UPLOADED_URI, LAST_DOWNLOADED_URI, LAST_DOWNLOADED_TIMESTAMP, PATHINFO],
    [_INSERT_OR_UPDATE],
    u"Record some metadata about a relative path in the magic-folder.",
)

# magic-folder db schema version 1
SCHEMA_v1 = """
CREATE TABLE version
(
version INTEGER -- contains one row, set to 1
);
CREATE TABLE local_files
(
path VARCHAR(1024) PRIMARY KEY, -- UTF-8 filename relative to local magic folder dir
size INTEGER, -- ST_SIZE, or NULL if the file has been deleted
mtime_ns INTEGER, -- ST_MTIME in nanoseconds
ctime_ns INTEGER, -- ST_CTIME in nanoseconds
version INTEGER,
last_uploaded_uri VARCHAR(256), -- URI:CHK:...
last_downloaded_uri VARCHAR(256), -- URI:CHK:...
last_downloaded_timestamp TIMESTAMP
);
"""
def get_magicfolderdb(dbfile, stderr=sys.stderr,
                      create_version=(SCHEMA_v1, 1), just_create=False):
    """
    Open (or create) the magic-folder database at ``dbfile``.

    The parent directory must already exist.  Returns a ``MagicFolderDB``
    on success, or ``None`` (after printing a message to ``stderr``) when
    the database cannot be opened or an unsupported schema version was
    requested.
    """
    try:
        (sqlite3, db) = get_db(dbfile, stderr, create_version,
                               just_create=just_create, dbname="magicfolderdb")
        if create_version[1] not in (1, 2):
            print("invalid magicfolderdb schema version specified", file=stderr)
            return None
        return MagicFolderDB(sqlite3, db)
    except DBError as e:
        print(e, file=stderr)
        return None
class LocalPath(object):
    """Pairing of a relative path with its ``PathEntry`` database record."""

    @classmethod
    def fromrow(self, row):
        """Build a LocalPath from a local_files row: (path, *PathEntry fields).

        NOTE(review): the first parameter is spelled ``self`` but, being a
        classmethod, actually receives the class object.
        """
        local_path = LocalPath()
        local_path.relpath_u = row[0]
        local_path.entry = PathEntry(*row[1:])
        return local_path
class MagicFolderDB(object):
    """
    Wrapper around the per-folder sqlite database tracking the local state
    (size, timestamps, version, caps) of every file in a magic-folder.
    """
    # Schema version this wrapper implements (see SCHEMA_v1).
    VERSION = 1

    def __init__(self, sqlite_module, connection):
        # sqlite_module is kept so callers' exception types (IntegrityError,
        # OperationalError) can be referenced without importing sqlite here.
        self.sqlite_module = sqlite_module
        self.connection = connection
        self.cursor = connection.cursor()

    def close(self):
        """Close the underlying database connection."""
        self.connection.close()

    def get_db_entry(self, relpath_u):
        """
        Retrieve the entry in the database for a given path, or return None
        if there is no such entry.
        """
        c = self.cursor
        c.execute("SELECT size, mtime_ns, ctime_ns, version, last_uploaded_uri,"
                  " last_downloaded_uri, last_downloaded_timestamp"
                  " FROM local_files"
                  " WHERE path=?",
                  (relpath_u,))
        row = self.cursor.fetchone()
        if not row:
            return None
        else:
            (size, mtime_ns, ctime_ns, version, last_uploaded_uri,
             last_downloaded_uri, last_downloaded_timestamp) = row
            return PathEntry(size=size, mtime_ns=mtime_ns, ctime_ns=ctime_ns, version=version,
                             last_uploaded_uri=last_uploaded_uri,
                             last_downloaded_uri=last_downloaded_uri,
                             last_downloaded_timestamp=last_downloaded_timestamp)

    def get_direct_children(self, relpath_u):
        """
        Given the relative path to a directory, return ``LocalPath`` instances
        representing all direct children of that directory.
        """
        # It would be great to not be interpolating data into query
        # statements.  However, query parameters are not supported in the
        # position where we need them.
        # NOTE(review): only single quotes are escaped here; safe for SQL
        # quoting, but relies on paths never containing other LIKE
        # metacharacters ('%', '_') that should match literally.
        sqlitesafe_relpath_u = relpath_u.replace(u"'", u"''")
        statement = (
            """
SELECT
path, size, mtime_ns, ctime_ns, version, last_uploaded_uri,
last_downloaded_uri, last_downloaded_timestamp
FROM
local_files
WHERE
-- The "_" used here ensures there is at least one character
-- after the /. This prevents matching the path itself.
path LIKE '{path}/_%' AND
-- The "_" used here serves a similar purpose. This allows
-- matching directory children but avoids matching their
-- children.
path NOT LIKE '{path}/_%/_%'
"""
        ).format(path=sqlitesafe_relpath_u)
        self.cursor.execute(statement)
        rows = self.cursor.fetchall()
        return list(
            LocalPath.fromrow(row)
            for row
            in rows
        )

    def get_all_relpaths(self):
        """
        Retrieve a set of all relpaths of files that have had an entry in magic folder db
        (i.e. that have been downloaded at least once).
        """
        self.cursor.execute("SELECT path FROM local_files")
        rows = self.cursor.fetchall()
        return set([r[0] for r in rows])

    def did_upload_version(self, relpath_u, version, last_uploaded_uri, last_downloaded_uri, last_downloaded_timestamp, pathinfo):
        """
        Record (insert or update) the metadata for an uploaded version of
        ``relpath_u``, inside an eliot UPDATE_ENTRY action.
        """
        action = UPDATE_ENTRY(
            relpath=relpath_u,
            version=version,
            last_uploaded_uri=last_uploaded_uri,
            last_downloaded_uri=last_downloaded_uri,
            last_downloaded_timestamp=last_downloaded_timestamp,
            pathinfo=pathinfo,
        )
        with action:
            # Try INSERT first; fall back to UPDATE when the path already
            # exists (IntegrityError) or the insert otherwise fails.
            try:
                self.cursor.execute("INSERT INTO local_files VALUES (?,?,?,?,?,?,?,?)",
                                    (relpath_u, pathinfo.size, pathinfo.mtime_ns, pathinfo.ctime_ns,
                                     version, last_uploaded_uri, last_downloaded_uri,
                                     last_downloaded_timestamp))
                action.add_success_fields(insert_or_update=u"insert")
            except (self.sqlite_module.IntegrityError, self.sqlite_module.OperationalError):
                self.cursor.execute("UPDATE local_files"
                                    " SET size=?, mtime_ns=?, ctime_ns=?, version=?, last_uploaded_uri=?,"
                                    " last_downloaded_uri=?, last_downloaded_timestamp=?"
                                    " WHERE path=?",
                                    (pathinfo.size, pathinfo.mtime_ns, pathinfo.ctime_ns, version,
                                     last_uploaded_uri, last_downloaded_uri, last_downloaded_timestamp,
                                     relpath_u))
                action.add_success_fields(insert_or_update=u"update")
            self.connection.commit()

View File

@ -1,32 +0,0 @@
import re
import os.path
from allmydata.util.assertutil import precondition, _assert
def path2magic(path):
    """Escape a relative path for use as a "magic" name.

    '/' becomes '@_' and a literal '@' becomes '@@', so the result contains
    no slashes and the mapping is reversible (see magic2path).
    """
    def _escape(match):
        return {u'/': u'@_', u'@': u'@@'}[match.group(0)]
    return re.sub(u'[/@]', _escape, path)
def magic2path(path):
    """Inverse of path2magic: '@_' -> '/' and '@@' -> '@'."""
    replacements = {u'@_': u'/', u'@@': u'@'}
    return re.sub(u'@[_@]', lambda m: replacements[m.group(0)], path)
# Files with these suffixes, or with any dot-prefixed path component, are
# excluded from magic-folder processing.
IGNORE_SUFFIXES = [u'.backup', u'.tmp', u'.conflict']
IGNORE_PREFIXES = [u'.']

def should_ignore_file(path_u):
    """Return True if the given (unicode) path should be ignored.

    A path is ignored when it ends with one of IGNORE_SUFFIXES, when any
    component of it starts with '.', or when it is absolute.
    """
    precondition(isinstance(path_u, unicode), path_u=path_u)

    if any(path_u.endswith(suffix) for suffix in IGNORE_SUFFIXES):
        return True

    remaining_u = path_u
    while remaining_u != u"":
        previous_u = remaining_u
        remaining_u, tail_u = os.path.split(remaining_u)
        if tail_u.startswith(u"."):
            return True
        if remaining_u == previous_u:
            return True  # the path was absolute
        _assert(len(remaining_u) < len(previous_u), path_u=remaining_u, oldpath_u=previous_u)
    return False

View File

@ -1,610 +0,0 @@
from __future__ import print_function
import os
import urllib
from types import NoneType
from six.moves import cStringIO as StringIO
from datetime import datetime
import json
from twisted.python import usage
from allmydata.util.assertutil import precondition
from .common import BaseOptions, BasedirOptions, get_aliases
from .cli import MakeDirectoryOptions, LnOptions, CreateAliasOptions
import tahoe_mv
from allmydata.util.encodingutil import argv_to_abspath, argv_to_unicode, to_str, \
quote_local_unicode_path
from allmydata.scripts.common_http import do_http, BadResponse
from allmydata.util import fileutil
from allmydata import uri
from allmydata.util.abbreviate import abbreviate_space, abbreviate_time
from allmydata.frontends.magic_folder import load_magic_folders
from allmydata.frontends.magic_folder import save_magic_folders
from allmydata.frontends.magic_folder import maybe_upgrade_magic_folders
INVITE_SEPARATOR = "+"
class CreateOptions(BasedirOptions):
    """Options for 'tahoe magic-folder create'."""
    nickname = None  # NOTE: *not* the "name of this magic-folder"
    local_dir = None
    synopsis = "MAGIC_ALIAS: [NICKNAME LOCAL_DIR]"
    optParameters = [
        ("poll-interval", "p", "60", "How often to ask for updates"),
        ("name", "n", "default", "The name of this magic-folder"),
    ]
    description = (
        "Create a new magic-folder. If you specify NICKNAME and "
        "LOCAL_DIR, this client will also be invited and join "
        "using the given nickname. A new alias (see 'tahoe list-aliases') "
        "will be added with the master folder's writecap."
    )

    def parseArgs(self, alias, nickname=None, local_dir=None):
        """Validate the alias, optional nickname/local dir, and poll interval."""
        BasedirOptions.parseArgs(self)
        alias = argv_to_unicode(alias)
        if not alias.endswith(u':'):
            raise usage.UsageError("An alias must end with a ':' character.")
        # Store the alias without its trailing ':'.
        self.alias = alias[:-1]
        self.nickname = None if nickname is None else argv_to_unicode(nickname)
        try:
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer"
            )
        # Expand the path relative to the current directory of the CLI command, not the node.
        self.local_dir = None if local_dir is None else argv_to_abspath(local_dir, long_path=False)
        if self.nickname and not self.local_dir:
            raise usage.UsageError("If NICKNAME is specified then LOCAL_DIR must also be specified.")
        # Cache the node's web API URL for the commands that need it.
        node_url_file = os.path.join(self['node-directory'], u"node.url")
        self['node-url'] = fileutil.read(node_url_file).strip()
def _delegate_options(source_options, target_options):
    """Copy shared CLI settings onto a sub-command's options object.

    Propagates aliases, node-url, node-directory and the folder name, and
    gives the target fresh in-memory stdin/stdout/stderr streams so its
    output can be captured.  Returns the (mutated) target_options.
    """
    target_options.aliases = get_aliases(source_options['node-directory'])
    for key in ("node-url", "node-directory", "name"):
        target_options[key] = source_options[key]
    target_options.stdin = StringIO("")
    target_options.stdout = StringIO()
    target_options.stderr = StringIO()
    return target_options
def create(options):
    """Create a new magic-folder (and optionally invite + join ourselves).

    Returns 0 on success, or the failing sub-command's nonzero exit code.
    """
    precondition(isinstance(options.alias, unicode), alias=options.alias)
    precondition(isinstance(options.nickname, (unicode, NoneType)), nickname=options.nickname)
    precondition(isinstance(options.local_dir, (unicode, NoneType)), local_dir=options.local_dir)

    # make sure we don't already have a magic-folder with this name before we create the alias
    maybe_upgrade_magic_folders(options["node-directory"])
    folders = load_magic_folders(options["node-directory"])
    if options['name'] in folders:
        print("Already have a magic-folder named '{}'".format(options['name']), file=options.stderr)
        return 1

    # create an alias; this basically just remembers the cap for the
    # master directory
    from allmydata.scripts import tahoe_add_alias
    create_alias_options = _delegate_options(options, CreateAliasOptions())
    create_alias_options.alias = options.alias

    rc = tahoe_add_alias.create_alias(create_alias_options)
    if rc != 0:
        print(create_alias_options.stderr.getvalue(), file=options.stderr)
        return rc
    print(create_alias_options.stdout.getvalue(), file=options.stdout)

    if options.nickname is not None:
        # Invite ourselves, then join with the resulting invite code.
        print(u"Inviting myself as client '{}':".format(options.nickname), file=options.stdout)
        invite_options = _delegate_options(options, InviteOptions())
        invite_options.alias = options.alias
        invite_options.nickname = options.nickname
        invite_options['name'] = options['name']
        rc = invite(invite_options)
        if rc != 0:
            print(u"magic-folder: failed to invite after create\n", file=options.stderr)
            print(invite_options.stderr.getvalue(), file=options.stderr)
            return rc
        # The invite code is whatever invite() printed to its stdout.
        invite_code = invite_options.stdout.getvalue().strip()
        print(u" created invite code", file=options.stdout)
        join_options = _delegate_options(options, JoinOptions())
        join_options['poll-interval'] = options['poll-interval']
        join_options.nickname = options.nickname
        join_options.local_dir = options.local_dir
        join_options.invite_code = invite_code
        rc = join(join_options)
        if rc != 0:
            print(u"magic-folder: failed to join after create\n", file=options.stderr)
            print(join_options.stderr.getvalue(), file=options.stderr)
            return rc
        print(u" joined new magic-folder", file=options.stdout)
        print(
            u"Successfully created magic-folder '{}' with alias '{}:' "
            u"and client '{}'\nYou must re-start your node before the "
            u"magic-folder will be active."
            .format(options['name'], options.alias, options.nickname), file=options.stdout)
    return 0
class ListOptions(BasedirOptions):
    """Options for 'tahoe magic-folder list'."""
    description = (
        "List all magic-folders this client has joined"
    )
    optFlags = [
        ("json", "", "Produce JSON output")
    ]
def list_(options):
    """List this client's magic-folders (JSON when --json is set). Returns 0."""
    folders = load_magic_folders(options["node-directory"])
    if options["json"]:
        _list_json(options, folders)
    else:
        _list_human(options, folders)
    return 0
def _list_json(options, folders):
"""
List our magic-folders using JSON
"""
info = dict()
for name, details in folders.items():
info[name] = {
u"directory": details["directory"],
}
print(json.dumps(info), file=options.stdout)
return 0
def _list_human(options, folders):
"""
List our magic-folders for a human user
"""
if folders:
print("This client has the following magic-folders:", file=options.stdout)
biggest = max([len(nm) for nm in folders.keys()])
fmt = " {:>%d}: {}" % (biggest, )
for name, details in folders.items():
print(fmt.format(name, details["directory"]), file=options.stdout)
else:
print("No magic-folders", file=options.stdout)
class InviteOptions(BasedirOptions):
    """Options for 'tahoe magic-folder invite'."""
    nickname = None
    synopsis = "MAGIC_ALIAS: NICKNAME"
    # No interactive input is ever read by this command.
    stdin = StringIO("")
    optParameters = [
        ("name", "n", "default", "The name of this magic-folder"),
    ]
    description = (
        "Invite a new participant to a given magic-folder. The resulting "
        "invite-code that is printed is secret information and MUST be "
        "transmitted securely to the invitee."
    )

    def parseArgs(self, alias, nickname=None):
        """Validate the alias and nickname and load node-url/aliases."""
        BasedirOptions.parseArgs(self)
        alias = argv_to_unicode(alias)
        if not alias.endswith(u':'):
            raise usage.UsageError("An alias must end with a ':' character.")
        self.alias = alias[:-1]
        self.nickname = argv_to_unicode(nickname)
        node_url_file = os.path.join(self['node-directory'], u"node.url")
        self['node-url'] = open(node_url_file, "r").read().strip()
        aliases = get_aliases(self['node-directory'])
        self.aliases = aliases
def invite(options):
    """Invite a participant: create their DMD and print the invite code.

    The invite code (written to options.stdout) is
    COLLECTIVE_READCAP + INVITE_SEPARATOR + DMD_WRITECAP.
    Returns 0 on success, nonzero on failure.
    """
    precondition(isinstance(options.alias, unicode), alias=options.alias)
    precondition(isinstance(options.nickname, unicode), nickname=options.nickname)

    from allmydata.scripts import tahoe_mkdir
    mkdir_options = _delegate_options(options, MakeDirectoryOptions())
    mkdir_options.where = None

    rc = tahoe_mkdir.mkdir(mkdir_options)
    if rc != 0:
        print("magic-folder: failed to mkdir\n", file=options.stderr)
        return rc

    # FIXME this assumes caps are ASCII.
    dmd_write_cap = mkdir_options.stdout.getvalue().strip()
    dmd_readonly_cap = uri.from_string(dmd_write_cap).get_readonly().to_string()
    if dmd_readonly_cap is None:
        print("magic-folder: failed to diminish dmd write cap\n", file=options.stderr)
        return 1

    magic_write_cap = get_aliases(options["node-directory"])[options.alias]
    magic_readonly_cap = uri.from_string(magic_write_cap).get_readonly().to_string()

    # tahoe ln CLIENT_READCAP COLLECTIVE_WRITECAP/NICKNAME
    ln_options = _delegate_options(options, LnOptions())
    ln_options.from_file = unicode(dmd_readonly_cap, 'utf-8')
    ln_options.to_file = u"%s/%s" % (unicode(magic_write_cap, 'utf-8'), options.nickname)
    rc = tahoe_mv.mv(ln_options, mode="link")
    if rc != 0:
        print("magic-folder: failed to create link\n", file=options.stderr)
        print(ln_options.stderr.getvalue(), file=options.stderr)
        return rc

    # FIXME: this assumes caps are ASCII.
    print("%s%s%s" % (magic_readonly_cap, INVITE_SEPARATOR, dmd_write_cap), file=options.stdout)
    return 0
class JoinOptions(BasedirOptions):
    """Options for 'tahoe magic-folder join'."""
    synopsis = "INVITE_CODE LOCAL_DIR"
    dmd_write_cap = ""
    magic_readonly_cap = ""
    optParameters = [
        ("poll-interval", "p", "60", "How often to ask for updates"),
        ("name", "n", "default", "Name of the magic-folder"),
    ]

    def parseArgs(self, invite_code, local_dir):
        """Validate the poll interval and normalize the invite code and path."""
        BasedirOptions.parseArgs(self)
        try:
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer"
            )
        # Expand the path relative to the current directory of the CLI command, not the node.
        self.local_dir = None if local_dir is None else argv_to_abspath(local_dir, long_path=False)
        self.invite_code = to_str(argv_to_unicode(invite_code))
def join(options):
    """Join a magic-folder using an invite code; persist it to the node config.

    Returns 0 on success, 1 when the name or its database already exists;
    raises UsageError on a malformed invite code.
    """
    fields = options.invite_code.split(INVITE_SEPARATOR)
    if len(fields) != 2:
        raise usage.UsageError("Invalid invite code.")
    magic_readonly_cap, dmd_write_cap = fields

    maybe_upgrade_magic_folders(options["node-directory"])
    existing_folders = load_magic_folders(options["node-directory"])

    if options['name'] in existing_folders:
        print("This client already has a magic-folder named '{}'".format(options['name']), file=options.stderr)
        return 1

    db_fname = os.path.join(
        options["node-directory"],
        u"private",
        u"magicfolder_{}.sqlite".format(options['name']),
    )
    if os.path.exists(db_fname):
        print("Database '{}' already exists; not overwriting".format(db_fname), file=options.stderr)
        return 1

    # NOTE(review): options.local_dir may be None here (JoinOptions allows
    # it); .encode() would then raise AttributeError — confirm callers
    # always supply LOCAL_DIR.
    folder = {
        u"directory": options.local_dir.encode('utf-8'),
        u"collective_dircap": magic_readonly_cap,
        u"upload_dircap": dmd_write_cap,
        u"poll_interval": options["poll-interval"],
    }
    existing_folders[options["name"]] = folder
    save_magic_folders(options["node-directory"], existing_folders)
    return 0
class LeaveOptions(BasedirOptions):
    """Options for 'tahoe magic-folder leave'."""
    synopsis = "Remove a magic-folder and forget all state"
    optParameters = [
        ("name", "n", "default", "Name of magic-folder to leave"),
    ]
def leave(options):
    """Remove a magic-folder's config entry and local database.

    Disables the [magic_folder] tahoe.cfg section when the last folder is
    removed.  Returns 0 on success, 1 when the folder does not exist.
    """
    from ConfigParser import SafeConfigParser

    existing_folders = load_magic_folders(options["node-directory"])

    if not existing_folders:
        print("No magic-folders at all", file=options.stderr)
        return 1

    if options["name"] not in existing_folders:
        print("No such magic-folder '{}'".format(options["name"]), file=options.stderr)
        return 1

    privdir = os.path.join(options["node-directory"], u"private")
    db_fname = os.path.join(privdir, u"magicfolder_{}.sqlite".format(options["name"]))

    # delete from YAML file and re-write it
    del existing_folders[options["name"]]
    save_magic_folders(options["node-directory"], existing_folders)

    # delete the database file; best-effort — a failure only warns.
    try:
        fileutil.remove(db_fname)
    except Exception as e:
        print("Warning: unable to remove %s due to %s: %s"
              % (quote_local_unicode_path(db_fname), e.__class__.__name__, str(e)), file=options.stderr)

    # if this was the last magic-folder, disable them entirely
    if not existing_folders:
        parser = SafeConfigParser()
        parser.read(os.path.join(options["node-directory"], u"tahoe.cfg"))
        parser.remove_section("magic_folder")
        with open(os.path.join(options["node-directory"], u"tahoe.cfg"), "w") as f:
            parser.write(f)

    return 0
class StatusOptions(BasedirOptions):
    """Options for 'tahoe magic-folder status'."""
    synopsis = ""
    # No interactive input is ever read by this command.
    stdin = StringIO("")
    optParameters = [
        ("name", "n", "default", "Name for the magic-folder to show status"),
    ]

    def parseArgs(self):
        """Load the node's web API URL from node.url."""
        BasedirOptions.parseArgs(self)
        node_url_file = os.path.join(self['node-directory'], u"node.url")
        with open(node_url_file, "r") as f:
            self['node-url'] = f.read().strip()
def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
    """GET or POST ``fragment`` under the node's web API and parse the JSON.

    ``post_args`` is required for POST and forbidden otherwise.  Raises
    RuntimeError when the HTTP request fails or yields no data.
    """
    nodeurl = options['node-url']
    if nodeurl.endswith('/'):
        nodeurl = nodeurl[:-1]

    url = u'%s/%s' % (nodeurl, fragment)
    if method == 'POST':
        if post_args is None:
            raise ValueError("Must pass post_args= for POST method")
        body = urllib.urlencode(post_args)
    else:
        body = ''
        if post_args is not None:
            raise ValueError("post_args= only valid for POST method")
    resp = do_http(method, url, body=body)
    if isinstance(resp, BadResponse):
        # specifically NOT using format_http_error() here because the
        # URL is pretty sensitive (we're doing /uri/<key>).
        raise RuntimeError(
            "Failed to get json from '%s': %s" % (nodeurl, resp.error)
        )

    data = resp.read()
    parsed = json.loads(data)
    if parsed is None:
        raise RuntimeError("No data from '%s'" % (nodeurl,))
    return parsed
def _get_json_for_cap(options, cap):
    """Fetch the JSON description of ``cap`` via the node's /uri endpoint."""
    fragment = 'uri/%s?t=json' % urllib.quote(cap)
    return _get_json_for_fragment(options, fragment)
def _print_item_status(item, now, longest):
    """Print one queued upload/download item, right-aligned to ``longest``.

    NOTE(review): this prints to the process's real stdout, not
    options.stdout like the rest of this module — confirm intentional.
    """
    paddedname = (' ' * (longest - len(item['path']))) + item['path']
    if 'failure_at' in item:
        ts = datetime.fromtimestamp(item['started_at'])
        prog = 'Failed %s (%s)' % (abbreviate_time(now - ts), ts)
    elif item['percent_done'] < 100.0:
        if 'started_at' not in item:
            prog = 'not yet started'
        else:
            so_far = now - datetime.fromtimestamp(item['started_at'])
            if so_far.seconds > 0.0:
                rate = item['percent_done'] / so_far.seconds
                if rate != 0:
                    time_left = (100.0 - item['percent_done']) / rate
                    prog = '%2.1f%% done, around %s left' % (
                        item['percent_done'],
                        abbreviate_time(time_left),
                    )
                else:
                    time_left = None
                    prog = '%2.1f%% done' % (item['percent_done'],)
            else:
                prog = 'just started'
    else:
        prog = ''
        # Report the most recent lifecycle event, in priority order.
        for verb in ['finished', 'started', 'queued']:
            keyname = verb + '_at'
            if keyname in item:
                when = datetime.fromtimestamp(item[keyname])
                prog = '%s %s' % (verb, abbreviate_time(now - when))
                break

    print(" %s: %s" % (paddedname, prog))
def status(options):
    """Show local/remote file state and pending transfers for one folder.

    Returns 0 on success, 2 on retrieval/shape errors, 3 on a server-side
    error; raises when the named folder is not configured.
    """
    nodedir = options["node-directory"]
    stdout, stderr = options.stdout, options.stderr
    magic_folders = load_magic_folders(os.path.join(options["node-directory"]))

    # The web API requires the node's private auth token.
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'rb') as f:
        token = f.read()

    print("Magic-folder status for '{}':".format(options["name"]), file=stdout)

    if options["name"] not in magic_folders:
        raise Exception(
            "No such magic-folder '{}'".format(options["name"])
        )

    dmd_cap = magic_folders[options["name"]]["upload_dircap"]
    collective_readcap = magic_folders[options["name"]]["collective_dircap"]

    # do *all* our data-retrievals first in case there's an error
    try:
        dmd_data = _get_json_for_cap(options, dmd_cap)
        remote_data = _get_json_for_cap(options, collective_readcap)
        magic_data = _get_json_for_fragment(
            options,
            'magic_folder?t=json',
            method='POST',
            post_args=dict(
                t='json',
                name=options["name"],
                token=token,
            )
        )
    except Exception as e:
        print("failed to retrieve data: %s" % str(e), file=stderr)
        return 2

    for d in [dmd_data, remote_data, magic_data]:
        if isinstance(d, dict) and 'error' in d:
            print("Error from server: %s" % d['error'], file=stderr)
            print("This means we can't retrieve the remote shared directory.", file=stderr)
            return 3

    captype, dmd = dmd_data
    if captype != 'dirnode':
        print("magic_folder_dircap isn't a directory capability", file=stderr)
        return 2

    now = datetime.now()

    print("Local files:", file=stdout)
    for (name, child) in dmd['children'].items():
        captype, meta = child
        status = 'good'
        size = meta['size']
        created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime'])
        version = meta['metadata']['version']
        nice_size = abbreviate_space(size)
        nice_created = abbreviate_time(now - created)
        if captype != 'filenode':
            print("%20s: error, should be a filecap" % name, file=stdout)
            continue
        print(" %s (%s): %s, version=%s, created %s" % (name, nice_size, status, version, nice_created), file=stdout)

    print(file=stdout)
    print("Remote files:", file=stdout)

    captype, collective = remote_data
    for (name, data) in collective['children'].items():
        if data[0] != 'dirnode':
            print("Error: '%s': expected a dirnode, not '%s'" % (name, data[0]), file=stdout)
        print(" %s's remote:" % name, file=stdout)
        # Fetch each participant's personal DMD via its read-only cap.
        dmd = _get_json_for_cap(options, data[1]['ro_uri'])
        if isinstance(dmd, dict) and 'error' in dmd:
            print(" Error: could not retrieve directory", file=stdout)
            continue
        if dmd[0] != 'dirnode':
            print("Error: should be a dirnode", file=stdout)
            continue
        for (n, d) in dmd[1]['children'].items():
            if d[0] != 'filenode':
                print("Error: expected '%s' to be a filenode." % (n,), file=stdout)

            meta = d[1]
            status = 'good'
            size = meta['size']
            created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime'])
            version = meta['metadata']['version']
            nice_size = abbreviate_space(size)
            nice_created = abbreviate_time(now - created)
            print(" %s (%s): %s, version=%s, created %s" % (n, nice_size, status, version, nice_created), file=stdout)

    if len(magic_data):
        uploads = [item for item in magic_data if item['kind'] == 'upload']
        downloads = [item for item in magic_data if item['kind'] == 'download']
        longest = max([len(item['path']) for item in magic_data])

        # maybe gate this with --show-completed option or something?
        uploads = [item for item in uploads if item['status'] != 'success']
        downloads = [item for item in downloads if item['status'] != 'success']

        if len(uploads):
            print()
            print("Uploads:", file=stdout)
            for item in uploads:
                _print_item_status(item, now, longest)

        if len(downloads):
            print()
            print("Downloads:", file=stdout)
            for item in downloads:
                _print_item_status(item, now, longest)

        for item in magic_data:
            if item['status'] == 'failure':
                print("Failed:", item, file=stdout)

    return 0
class MagicFolderCommand(BaseOptions):
    """Top-level 'tahoe magic-folder' command: routes to the subcommands."""
    subCommands = [
        ["create", None, CreateOptions, "Create a Magic Folder."],
        ["invite", None, InviteOptions, "Invite someone to a Magic Folder."],
        ["join", None, JoinOptions, "Join a Magic Folder."],
        ["leave", None, LeaveOptions, "Leave a Magic Folder."],
        ["status", None, StatusOptions, "Display status of uploads/downloads."],
        ["list", None, ListOptions, "List Magic Folders configured in this client."],
    ]
    optFlags = [
        ["debug", "d", "Print full stack-traces"],
    ]
    description = (
        "A magic-folder has an owner who controls the writecap "
        "containing a list of nicknames and readcaps. The owner can invite "
        "new participants. Every participant has the writecap for their "
        "own folder (the corresponding readcap is in the master folder). "
        "All clients download files from all other participants using the "
        "readcaps contained in the master magic-folder directory."
    )

    def postOptions(self):
        """Reject an invocation with no subcommand."""
        if not hasattr(self, 'subOptions'):
            raise usage.UsageError("must specify a subcommand")

    def getSynopsis(self):
        return "Usage: tahoe [global-options] magic-folder"

    def getUsage(self, width=None):
        t = BaseOptions.getUsage(self, width)
        t += (
            "Please run e.g. 'tahoe magic-folder create --help' for more "
            "details on each subcommand.\n"
        )
        return t
# Maps each subcommand name (as parsed by MagicFolderCommand) to its
# implementation function.
subDispatch = {
    "create": create,
    "invite": invite,
    "join": join,
    "leave": leave,
    "status": status,
    "list": list_,
}
def do_magic_folder(options):
    """Dispatch a parsed 'tahoe magic-folder' subcommand.

    Wires the parent command's output streams onto the subcommand's
    options, runs the handler, and converts exceptions into an error
    message on stderr (re-raising when --debug was given).
    """
    sub_options = options.subOptions
    sub_options.stdout = options.stdout
    sub_options.stderr = options.stderr
    handler = subDispatch[options.subCommand]
    try:
        return handler(sub_options)
    except Exception as e:
        print("Error: %s" % (e,), file=options.stderr)
        if options['debug']:
            raise
# Hooks consumed by allmydata.scripts.runner: the subcommand table entry
# and the name-to-handler dispatch map for this module.
subCommands = [
    ["magic-folder", None, MagicFolderCommand,
     "Magic Folder subcommands: use 'tahoe magic-folder' for a list."],
]

dispatch = {
    "magic-folder": do_magic_folder,
}

View File

@ -9,7 +9,7 @@ from twisted.internet import defer, task, threads
from allmydata.version_checks import get_package_versions_string
from allmydata.scripts.common import get_default_nodedir
from allmydata.scripts import debug, create_node, cli, \
stats_gatherer, admin, magic_folder_cli, tahoe_daemonize, tahoe_start, \
stats_gatherer, admin, tahoe_daemonize, tahoe_start, \
tahoe_stop, tahoe_restart, tahoe_run, tahoe_invite
from allmydata.util.encodingutil import quote_output, quote_local_unicode_path, get_io_encoding
from allmydata.util.eliotutil import (
@ -61,7 +61,6 @@ class Options(usage.Options):
+ process_control_commands
+ debug.subCommands
+ cli.subCommands
+ magic_folder_cli.subCommands
+ tahoe_invite.subCommands
)
@ -154,10 +153,6 @@ def dispatch(config,
# these are blocking, and must be run in a thread
f0 = cli.dispatch[command]
f = lambda so: threads.deferToThread(f0, so)
elif command in magic_folder_cli.dispatch:
# same
f0 = magic_folder_cli.dispatch[command]
f = lambda so: threads.deferToThread(f0, so)
elif command in tahoe_invite.dispatch:
f = tahoe_invite.dispatch[command]
else:

View File

@ -1,814 +0,0 @@
import json
import shutil
import os.path
import mock
import re
import time
from datetime import datetime
from eliot import (
log_call,
start_action,
)
from eliot.twisted import (
DeferredContext,
)
from twisted.trial import unittest
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import usage
from allmydata.util.assertutil import precondition
from allmydata.util import fileutil
from allmydata.scripts.common import get_aliases
from ..no_network import GridTestMixin
from ..common_util import parse_cli
from .common import CLITestMixin
from allmydata.test.common_util import NonASCIIPathMixin
from allmydata.scripts import magic_folder_cli
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.encodingutil import unicode_to_argv
from allmydata.frontends.magic_folder import MagicFolder
from allmydata import uri
from ...util.eliotutil import (
log_call_deferred,
)
class MagicFolderCLITestMixin(CLITestMixin, GridTestMixin, NonASCIIPathMixin):
def setUp(self):
    """Set up the test grid and pick nicknames, preferring non-ASCII ones
    where the platform's filesystem/IO encoding supports them."""
    GridTestMixin.setUp(self)
    self.alice_nickname = self.unicode_or_fallback(u"Alice\u00F8", u"Alice", io_as_well=True)
    self.bob_nickname = self.unicode_or_fallback(u"Bob\u00F8", u"Bob", io_as_well=True)
def do_create_magic_folder(self, client_num):
    """Run 'magic-folder create magic:' for the given client and assert
    the alias was created; returns a Deferred."""
    with start_action(action_type=u"create-magic-folder", client_num=client_num).context():
        d = DeferredContext(
            self.do_cli(
                "magic-folder", "--debug", "create", "magic:",
                client_num=client_num,
            )
        )
    def _done(args):
        (rc, stdout, stderr) = args
        self.failUnlessEqual(rc, 0, stdout + stderr)
        self.assertIn("Alias 'magic' created", stdout)
        self.failUnlessEqual(stderr, "")
        aliases = get_aliases(self.get_clientdir(i=client_num))
        self.assertIn("magic", aliases)
        self.failUnless(aliases["magic"].startswith("URI:DIR2:"))
    d.addCallback(_done)
    return d.addActionFinish()
def do_invite(self, client_num, nickname):
nickname_arg = unicode_to_argv(nickname)
action = start_action(
action_type=u"invite-to-magic-folder",
client_num=client_num,
nickname=nickname,
)
with action.context():
d = DeferredContext(
self.do_cli(
"magic-folder",
"invite",
"magic:",
nickname_arg,
client_num=client_num,
)
)
def _done(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0)
return (rc, stdout, stderr)
d.addCallback(_done)
return d.addActionFinish()
def do_list(self, client_num, json=False):
args = ("magic-folder", "list",)
if json:
args = args + ("--json",)
d = self.do_cli(*args, client_num=client_num)
def _done(args):
(rc, stdout, stderr) = args
return (rc, stdout, stderr)
d.addCallback(_done)
return d
def do_status(self, client_num, name=None):
args = ("magic-folder", "status",)
if name is not None:
args = args + ("--name", name)
d = self.do_cli(*args, client_num=client_num)
def _done(args):
(rc, stdout, stderr) = args
return (rc, stdout, stderr)
d.addCallback(_done)
return d
def do_join(self, client_num, local_dir, invite_code):
action = start_action(
action_type=u"join-magic-folder",
client_num=client_num,
local_dir=local_dir,
invite_code=invite_code,
)
with action.context():
precondition(isinstance(local_dir, unicode), local_dir=local_dir)
precondition(isinstance(invite_code, str), invite_code=invite_code)
local_dir_arg = unicode_to_argv(local_dir)
d = DeferredContext(
self.do_cli(
"magic-folder",
"join",
invite_code,
local_dir_arg,
client_num=client_num,
)
)
def _done(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0)
self.failUnlessEqual(stdout, "")
self.failUnlessEqual(stderr, "")
return (rc, stdout, stderr)
d.addCallback(_done)
return d.addActionFinish()
def do_leave(self, client_num):
d = self.do_cli("magic-folder", "leave", client_num=client_num)
def _done(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0)
return (rc, stdout, stderr)
d.addCallback(_done)
return d
def check_joined_config(self, client_num, upload_dircap):
"""Tests that our collective directory has the readonly cap of
our upload directory.
"""
action = start_action(action_type=u"check-joined-config")
with action.context():
collective_readonly_cap = self.get_caps_from_files(client_num)[0]
d = DeferredContext(
self.do_cli(
"ls", "--json",
collective_readonly_cap,
client_num=client_num,
)
)
def _done(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0)
return (rc, stdout, stderr)
d.addCallback(_done)
def test_joined_magic_folder(args):
(rc, stdout, stderr) = args
readonly_cap = unicode(uri.from_string(upload_dircap).get_readonly().to_string(), 'utf-8')
s = re.search(readonly_cap, stdout)
self.failUnless(s is not None)
return None
d.addCallback(test_joined_magic_folder)
return d.addActionFinish()
def get_caps_from_files(self, client_num):
from allmydata.frontends.magic_folder import load_magic_folders
folders = load_magic_folders(self.get_clientdir(i=client_num))
mf = folders["default"]
return mf['collective_dircap'], mf['upload_dircap']
@log_call
def check_config(self, client_num, local_dir):
client_config = fileutil.read(os.path.join(self.get_clientdir(i=client_num), "tahoe.cfg"))
mf_yaml = fileutil.read(os.path.join(self.get_clientdir(i=client_num), "private", "magic_folders.yaml"))
local_dir_utf8 = local_dir.encode('utf-8')
magic_folder_config = "[magic_folder]\nenabled = True"
self.assertIn(magic_folder_config, client_config)
self.assertIn(local_dir_utf8, mf_yaml)
def create_invite_join_magic_folder(self, nickname, local_dir):
nickname_arg = unicode_to_argv(nickname)
local_dir_arg = unicode_to_argv(local_dir)
# the --debug means we get real exceptions on failures
d = self.do_cli("magic-folder", "--debug", "create", "magic:", nickname_arg, local_dir_arg)
def _done(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0, stdout + stderr)
client = self.get_client()
self.collective_dircap, self.upload_dircap = self.get_caps_from_files(0)
self.collective_dirnode = client.create_node_from_uri(self.collective_dircap)
self.upload_dirnode = client.create_node_from_uri(self.upload_dircap)
d.addCallback(_done)
d.addCallback(lambda ign: self.check_joined_config(0, self.upload_dircap))
d.addCallback(lambda ign: self.check_config(0, local_dir))
return d
# XXX should probably just be "tearDown"...
@log_call_deferred(action_type=u"test:cli:magic-folder:cleanup")
def cleanup(self, res):
d = DeferredContext(defer.succeed(None))
def _clean(ign):
return self.magicfolder.disownServiceParent()
d.addCallback(_clean)
d.addCallback(lambda ign: res)
return d.result
def init_magicfolder(self, client_num, upload_dircap, collective_dircap, local_magic_dir, clock):
dbfile = abspath_expanduser_unicode(u"magicfolder_default.sqlite", base=self.get_clientdir(i=client_num))
magicfolder = MagicFolder(
client=self.get_client(client_num),
upload_dircap=upload_dircap,
collective_dircap=collective_dircap,
local_path_u=local_magic_dir,
dbfile=dbfile,
umask=0o077,
name='default',
clock=clock,
uploader_delay=0.2,
downloader_delay=0,
)
magicfolder.setServiceParent(self.get_client(client_num))
magicfolder.ready()
return magicfolder
def setup_alice_and_bob(self, alice_clock=reactor, bob_clock=reactor):
self.set_up_grid(num_clients=2, oneshare=True)
self.alice_magicfolder = None
self.bob_magicfolder = None
alice_magic_dir = abspath_expanduser_unicode(u"Alice-magic", base=self.basedir)
self.mkdir_nonascii(alice_magic_dir)
bob_magic_dir = abspath_expanduser_unicode(u"Bob-magic", base=self.basedir)
self.mkdir_nonascii(bob_magic_dir)
# Alice creates a Magic Folder, invites herself and joins.
d = self.do_create_magic_folder(0)
d.addCallback(lambda ign: self.do_invite(0, self.alice_nickname))
def get_invite_code(result):
self.invite_code = result[1].strip()
d.addCallback(get_invite_code)
d.addCallback(lambda ign: self.do_join(0, alice_magic_dir, self.invite_code))
def get_alice_caps(ign):
self.alice_collective_dircap, self.alice_upload_dircap = self.get_caps_from_files(0)
d.addCallback(get_alice_caps)
d.addCallback(lambda ign: self.check_joined_config(0, self.alice_upload_dircap))
d.addCallback(lambda ign: self.check_config(0, alice_magic_dir))
def get_Alice_magicfolder(result):
self.alice_magicfolder = self.init_magicfolder(0, self.alice_upload_dircap,
self.alice_collective_dircap,
alice_magic_dir, alice_clock)
return result
d.addCallback(get_Alice_magicfolder)
# Alice invites Bob. Bob joins.
d.addCallback(lambda ign: self.do_invite(0, self.bob_nickname))
def get_invite_code(result):
self.invite_code = result[1].strip()
d.addCallback(get_invite_code)
d.addCallback(lambda ign: self.do_join(1, bob_magic_dir, self.invite_code))
def get_bob_caps(ign):
self.bob_collective_dircap, self.bob_upload_dircap = self.get_caps_from_files(1)
d.addCallback(get_bob_caps)
d.addCallback(lambda ign: self.check_joined_config(1, self.bob_upload_dircap))
d.addCallback(lambda ign: self.check_config(1, bob_magic_dir))
def get_Bob_magicfolder(result):
self.bob_magicfolder = self.init_magicfolder(1, self.bob_upload_dircap,
self.bob_collective_dircap,
bob_magic_dir, bob_clock)
return result
d.addCallback(get_Bob_magicfolder)
return d
class ListMagicFolder(MagicFolderCLITestMixin, unittest.TestCase):
@defer.inlineCallbacks
def setUp(self):
yield super(ListMagicFolder, self).setUp()
self.basedir="mf_list"
self.set_up_grid(oneshare=True)
self.local_dir = os.path.join(self.basedir, "magic")
os.mkdir(self.local_dir)
self.abs_local_dir_u = abspath_expanduser_unicode(unicode(self.local_dir), long_path=False)
yield self.do_create_magic_folder(0)
(rc, stdout, stderr) = yield self.do_invite(0, self.alice_nickname)
invite_code = stdout.strip()
yield self.do_join(0, unicode(self.local_dir), invite_code)
@defer.inlineCallbacks
def tearDown(self):
yield super(ListMagicFolder, self).tearDown()
shutil.rmtree(self.basedir)
@defer.inlineCallbacks
def test_list(self):
rc, stdout, stderr = yield self.do_list(0)
self.failUnlessEqual(rc, 0)
self.assertIn("default:", stdout)
@defer.inlineCallbacks
def test_list_none(self):
yield self.do_leave(0)
rc, stdout, stderr = yield self.do_list(0)
self.failUnlessEqual(rc, 0)
self.assertIn("No magic-folders", stdout)
@defer.inlineCallbacks
def test_list_json(self):
rc, stdout, stderr = yield self.do_list(0, json=True)
self.failUnlessEqual(rc, 0)
res = json.loads(stdout)
self.assertEqual(
dict(default=dict(directory=self.abs_local_dir_u)),
res,
)
class StatusMagicFolder(MagicFolderCLITestMixin, unittest.TestCase):
@defer.inlineCallbacks
def setUp(self):
yield super(StatusMagicFolder, self).setUp()
self.basedir="mf_list"
self.set_up_grid(oneshare=True)
self.local_dir = os.path.join(self.basedir, "magic")
os.mkdir(self.local_dir)
self.abs_local_dir_u = abspath_expanduser_unicode(unicode(self.local_dir), long_path=False)
yield self.do_create_magic_folder(0)
(rc, stdout, stderr) = yield self.do_invite(0, self.alice_nickname)
invite_code = stdout.strip()
yield self.do_join(0, unicode(self.local_dir), invite_code)
@defer.inlineCallbacks
def tearDown(self):
yield super(StatusMagicFolder, self).tearDown()
shutil.rmtree(self.basedir)
@defer.inlineCallbacks
def test_status(self):
now = datetime.now()
then = now.replace(year=now.year - 5)
five_year_interval = (now - then).total_seconds()
def json_for_cap(options, cap):
if cap.startswith('URI:DIR2:'):
return (
'dirnode',
{
"children": {
"foo": ('filenode', {
"size": 1234,
"metadata": {
"tahoe": {
"linkcrtime": (time.time() - five_year_interval),
},
"version": 1,
},
"ro_uri": "read-only URI",
})
}
}
)
else:
return ('dirnode', {"children": {}})
jc = mock.patch(
"allmydata.scripts.magic_folder_cli._get_json_for_cap",
side_effect=json_for_cap,
)
def json_for_frag(options, fragment, method='GET', post_args=None):
return {}
jf = mock.patch(
"allmydata.scripts.magic_folder_cli._get_json_for_fragment",
side_effect=json_for_frag,
)
with jc, jf:
rc, stdout, stderr = yield self.do_status(0)
self.failUnlessEqual(rc, 0)
self.assertIn("default", stdout)
self.assertIn(
"foo (1.23 kB): good, version=1, created 5 years ago",
stdout,
)
@defer.inlineCallbacks
def test_status_child_not_dirnode(self):
def json_for_cap(options, cap):
if cap.startswith('URI:DIR2'):
return (
'dirnode',
{
"children": {
"foo": ('filenode', {
"size": 1234,
"metadata": {
"tahoe": {
"linkcrtime": 0.0,
},
"version": 1,
},
"ro_uri": "read-only URI",
})
}
}
)
elif cap == "read-only URI":
return {
"error": "bad stuff",
}
else:
return ('dirnode', {"children": {}})
jc = mock.patch(
"allmydata.scripts.magic_folder_cli._get_json_for_cap",
side_effect=json_for_cap,
)
def json_for_frag(options, fragment, method='GET', post_args=None):
return {}
jf = mock.patch(
"allmydata.scripts.magic_folder_cli._get_json_for_fragment",
side_effect=json_for_frag,
)
with jc, jf:
rc, stdout, stderr = yield self.do_status(0)
self.failUnlessEqual(rc, 0)
self.assertIn(
"expected a dirnode",
stdout + stderr,
)
@defer.inlineCallbacks
def test_status_error_not_dircap(self):
def json_for_cap(options, cap):
if cap.startswith('URI:DIR2:'):
return (
'filenode',
{}
)
else:
return ('dirnode', {"children": {}})
jc = mock.patch(
"allmydata.scripts.magic_folder_cli._get_json_for_cap",
side_effect=json_for_cap,
)
def json_for_frag(options, fragment, method='GET', post_args=None):
return {}
jf = mock.patch(
"allmydata.scripts.magic_folder_cli._get_json_for_fragment",
side_effect=json_for_frag,
)
with jc, jf:
rc, stdout, stderr = yield self.do_status(0)
self.failUnlessEqual(rc, 2)
self.assertIn(
"magic_folder_dircap isn't a directory capability",
stdout + stderr,
)
@defer.inlineCallbacks
def test_status_nothing(self):
rc, stdout, stderr = yield self.do_status(0, name="blam")
self.assertIn("No such magic-folder 'blam'", stderr)
class CreateMagicFolder(MagicFolderCLITestMixin, unittest.TestCase):
def test_create_and_then_invite_join(self):
self.basedir = "cli/MagicFolder/create-and-then-invite-join"
self.set_up_grid(oneshare=True)
local_dir = os.path.join(self.basedir, "magic")
os.mkdir(local_dir)
abs_local_dir_u = abspath_expanduser_unicode(unicode(local_dir), long_path=False)
d = self.do_create_magic_folder(0)
d.addCallback(lambda ign: self.do_invite(0, self.alice_nickname))
def get_invite_code_and_join(args):
(rc, stdout, stderr) = args
invite_code = stdout.strip()
return self.do_join(0, unicode(local_dir), invite_code)
d.addCallback(get_invite_code_and_join)
def get_caps(ign):
self.collective_dircap, self.upload_dircap = self.get_caps_from_files(0)
d.addCallback(get_caps)
d.addCallback(lambda ign: self.check_joined_config(0, self.upload_dircap))
d.addCallback(lambda ign: self.check_config(0, abs_local_dir_u))
return d
def test_create_error(self):
self.basedir = "cli/MagicFolder/create-error"
self.set_up_grid(oneshare=True)
d = self.do_cli("magic-folder", "create", "m a g i c:", client_num=0)
def _done(args):
(rc, stdout, stderr) = args
self.failIfEqual(rc, 0)
self.failUnlessIn("Alias names cannot contain spaces.", stderr)
d.addCallback(_done)
return d
@defer.inlineCallbacks
def test_create_duplicate_name(self):
self.basedir = "cli/MagicFolder/create-dup"
self.set_up_grid(oneshare=True)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "create", "magic:", "--name", "foo",
client_num=0,
)
self.assertEqual(rc, 0)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "create", "magic:", "--name", "foo",
client_num=0,
)
self.assertEqual(rc, 1)
self.assertIn(
"Already have a magic-folder named 'default'",
stderr
)
@defer.inlineCallbacks
def test_leave_wrong_folder(self):
self.basedir = "cli/MagicFolder/leave_wrong_folders"
yield self.set_up_grid(oneshare=True)
magic_dir = os.path.join(self.basedir, 'magic')
os.mkdir(magic_dir)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "create", "--name", "foo", "magic:", "my_name", magic_dir,
client_num=0,
)
self.assertEqual(rc, 0)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "leave", "--name", "bar",
client_num=0,
)
self.assertNotEqual(rc, 0)
self.assertIn(
"No such magic-folder 'bar'",
stdout + stderr,
)
@defer.inlineCallbacks
def test_leave_no_folder(self):
self.basedir = "cli/MagicFolder/leave_no_folders"
yield self.set_up_grid(oneshare=True)
magic_dir = os.path.join(self.basedir, 'magic')
os.mkdir(magic_dir)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "create", "--name", "foo", "magic:", "my_name", magic_dir,
client_num=0,
)
self.assertEqual(rc, 0)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "leave", "--name", "foo",
client_num=0,
)
self.assertEqual(rc, 0)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "leave", "--name", "foo",
client_num=0,
)
self.assertEqual(rc, 1)
self.assertIn(
"No magic-folders at all",
stderr,
)
@defer.inlineCallbacks
def test_leave_no_folders_at_all(self):
self.basedir = "cli/MagicFolder/leave_no_folders_at_all"
yield self.set_up_grid(oneshare=True)
rc, stdout, stderr = yield self.do_cli(
"magic-folder", "leave",
client_num=0,
)
self.assertEqual(rc, 1)
self.assertIn(
"No magic-folders at all",
stderr,
)
def test_create_invite_join(self):
self.basedir = "cli/MagicFolder/create-invite-join"
self.set_up_grid(oneshare=True)
local_dir = os.path.join(self.basedir, "magic")
abs_local_dir_u = abspath_expanduser_unicode(unicode(local_dir), long_path=False)
d = self.do_cli("magic-folder", "create", "magic:", "Alice", local_dir)
def _done(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0)
self.collective_dircap, self.upload_dircap = self.get_caps_from_files(0)
d.addCallback(_done)
d.addCallback(lambda ign: self.check_joined_config(0, self.upload_dircap))
d.addCallback(lambda ign: self.check_config(0, abs_local_dir_u))
return d
def test_help_synopsis(self):
self.basedir = "cli/MagicFolder/help_synopsis"
os.makedirs(self.basedir)
o = magic_folder_cli.CreateOptions()
o.parent = magic_folder_cli.MagicFolderCommand()
o.parent.getSynopsis()
def test_create_invite_join_failure(self):
self.basedir = "cli/MagicFolder/create-invite-join-failure"
os.makedirs(self.basedir)
o = magic_folder_cli.CreateOptions()
o.parent = magic_folder_cli.MagicFolderCommand()
o.parent['node-directory'] = self.basedir
try:
o.parseArgs("magic:", "Alice", "-foo")
except usage.UsageError as e:
self.failUnlessIn("cannot start with '-'", str(e))
else:
self.fail("expected UsageError")
def test_join_failure(self):
self.basedir = "cli/MagicFolder/create-join-failure"
os.makedirs(self.basedir)
o = magic_folder_cli.JoinOptions()
o.parent = magic_folder_cli.MagicFolderCommand()
o.parent['node-directory'] = self.basedir
try:
o.parseArgs("URI:invite+URI:code", "-foo")
except usage.UsageError as e:
self.failUnlessIn("cannot start with '-'", str(e))
else:
self.fail("expected UsageError")
def test_join_twice_failure(self):
self.basedir = "cli/MagicFolder/create-join-twice-failure"
os.makedirs(self.basedir)
self.set_up_grid(oneshare=True)
local_dir = os.path.join(self.basedir, "magic")
abs_local_dir_u = abspath_expanduser_unicode(unicode(local_dir), long_path=False)
d = self.do_create_magic_folder(0)
d.addCallback(lambda ign: self.do_invite(0, self.alice_nickname))
def get_invite_code_and_join(args):
(rc, stdout, stderr) = args
self.invite_code = stdout.strip()
return self.do_join(0, unicode(local_dir), self.invite_code)
d.addCallback(get_invite_code_and_join)
def get_caps(ign):
self.collective_dircap, self.upload_dircap = self.get_caps_from_files(0)
d.addCallback(get_caps)
d.addCallback(lambda ign: self.check_joined_config(0, self.upload_dircap))
d.addCallback(lambda ign: self.check_config(0, abs_local_dir_u))
def join_again(ignore):
return self.do_cli("magic-folder", "join", self.invite_code, local_dir, client_num=0)
d.addCallback(join_again)
def get_results(result):
(rc, out, err) = result
self.failUnlessEqual(out, "")
self.failUnlessIn("This client already has a magic-folder", err)
self.failIfEqual(rc, 0)
d.addCallback(get_results)
return d
def test_join_leave_join(self):
self.basedir = "cli/MagicFolder/create-join-leave-join"
os.makedirs(self.basedir)
self.set_up_grid(oneshare=True)
local_dir = os.path.join(self.basedir, "magic")
abs_local_dir_u = abspath_expanduser_unicode(unicode(local_dir), long_path=False)
self.invite_code = None
d = self.do_create_magic_folder(0)
d.addCallback(lambda ign: self.do_invite(0, self.alice_nickname))
def get_invite_code_and_join(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0)
self.invite_code = stdout.strip()
return self.do_join(0, unicode(local_dir), self.invite_code)
d.addCallback(get_invite_code_and_join)
def get_caps(ign):
self.collective_dircap, self.upload_dircap = self.get_caps_from_files(0)
d.addCallback(get_caps)
d.addCallback(lambda ign: self.check_joined_config(0, self.upload_dircap))
d.addCallback(lambda ign: self.check_config(0, abs_local_dir_u))
d.addCallback(lambda ign: self.do_leave(0))
d.addCallback(lambda ign: self.do_join(0, unicode(local_dir), self.invite_code))
def get_caps(ign):
self.collective_dircap, self.upload_dircap = self.get_caps_from_files(0)
d.addCallback(get_caps)
d.addCallback(lambda ign: self.check_joined_config(0, self.upload_dircap))
d.addCallback(lambda ign: self.check_config(0, abs_local_dir_u))
return d
def test_join_failures(self):
self.basedir = "cli/MagicFolder/create-join-failures"
os.makedirs(self.basedir)
self.set_up_grid(oneshare=True)
local_dir = os.path.join(self.basedir, "magic")
os.mkdir(local_dir)
abs_local_dir_u = abspath_expanduser_unicode(unicode(local_dir), long_path=False)
self.invite_code = None
d = self.do_create_magic_folder(0)
d.addCallback(lambda ign: self.do_invite(0, self.alice_nickname))
def get_invite_code_and_join(args):
(rc, stdout, stderr) = args
self.failUnlessEqual(rc, 0)
self.invite_code = stdout.strip()
return self.do_join(0, unicode(local_dir), self.invite_code)
d.addCallback(get_invite_code_and_join)
def get_caps(ign):
self.collective_dircap, self.upload_dircap = self.get_caps_from_files(0)
d.addCallback(get_caps)
d.addCallback(lambda ign: self.check_joined_config(0, self.upload_dircap))
d.addCallback(lambda ign: self.check_config(0, abs_local_dir_u))
def check_success(result):
(rc, out, err) = result
self.failUnlessEqual(rc, 0, out + err)
def check_failure(result):
(rc, out, err) = result
self.failIfEqual(rc, 0)
def leave(ign):
return self.do_cli("magic-folder", "leave", client_num=0)
d.addCallback(leave)
d.addCallback(check_success)
magic_folder_db_file = os.path.join(self.get_clientdir(i=0), u"private", u"magicfolder_default.sqlite")
def check_join_if_file(my_file):
fileutil.write(my_file, "my file data")
d2 = self.do_cli("magic-folder", "join", self.invite_code, local_dir, client_num=0)
d2.addCallback(check_failure)
return d2
for my_file in [magic_folder_db_file]:
d.addCallback(lambda ign, my_file: check_join_if_file(my_file), my_file)
d.addCallback(leave)
# we didn't successfully join, so leaving should be an error
d.addCallback(check_failure)
return d
class CreateErrors(unittest.TestCase):
def test_poll_interval(self):
e = self.assertRaises(usage.UsageError, parse_cli,
"magic-folder", "create", "--poll-interval=frog",
"alias:")
self.assertEqual(str(e), "--poll-interval must be a positive integer")
e = self.assertRaises(usage.UsageError, parse_cli,
"magic-folder", "create", "--poll-interval=-4",
"alias:")
self.assertEqual(str(e), "--poll-interval must be a positive integer")
def test_alias(self):
e = self.assertRaises(usage.UsageError, parse_cli,
"magic-folder", "create", "no-colon")
self.assertEqual(str(e), "An alias must end with a ':' character.")
def test_nickname(self):
e = self.assertRaises(usage.UsageError, parse_cli,
"magic-folder", "create", "alias:", "nickname")
self.assertEqual(str(e), "If NICKNAME is specified then LOCAL_DIR must also be specified.")
class InviteErrors(unittest.TestCase):
def test_alias(self):
e = self.assertRaises(usage.UsageError, parse_cli,
"magic-folder", "invite", "no-colon")
self.assertEqual(str(e), "An alias must end with a ':' character.")
class JoinErrors(unittest.TestCase):
def test_poll_interval(self):
e = self.assertRaises(usage.UsageError, parse_cli,
"magic-folder", "join", "--poll-interval=frog",
"code", "localdir")
self.assertEqual(str(e), "--poll-interval must be a positive integer")
e = self.assertRaises(usage.UsageError, parse_cli,
"magic-folder", "join", "--poll-interval=-2",
"code", "localdir")
self.assertEqual(str(e), "--poll-interval must be a positive integer")

View File

@ -37,7 +37,6 @@ from testtools.twistedsupport import (
)
import allmydata
import allmydata.frontends.magic_folder
import allmydata.util.log
from allmydata.node import OldConfigError, OldConfigOptionError, UnescapedHashError, _Config, create_node_dir
@ -658,104 +657,6 @@ class Basic(testutil.ReallyEqualMixin, testutil.NonASCIIPathMixin, unittest.Test
yield _check("helper.furl = None", None)
yield _check("helper.furl = pb://blah\n", "pb://blah")
@defer.inlineCallbacks
def test_create_magic_folder_service(self):
"""
providing magic-folder options actually creates a MagicFolder service
"""
boom = False
class Boom(Exception):
pass
class MockMagicFolder(allmydata.frontends.magic_folder.MagicFolder):
name = 'magic-folder'
def __init__(self, client, upload_dircap, collective_dircap, local_path_u, dbfile, umask, name,
inotify=None, uploader_delay=1.0, clock=None, downloader_delay=3):
if boom:
raise Boom()
service.MultiService.__init__(self)
self.client = client
self._umask = umask
self.upload_dircap = upload_dircap
self.collective_dircap = collective_dircap
self.local_dir = local_path_u
self.dbfile = dbfile
self.inotify = inotify
def startService(self):
self.running = True
def stopService(self):
self.running = False
def ready(self):
pass
self.patch(allmydata.frontends.magic_folder, 'MagicFolder', MockMagicFolder)
upload_dircap = "URI:DIR2:blah"
local_dir_u = self.unicode_or_fallback(u"loc\u0101l_dir", u"local_dir")
local_dir_utf8 = local_dir_u.encode('utf-8')
config = (BASECONFIG +
"[storage]\n" +
"enabled = false\n" +
"[magic_folder]\n" +
"enabled = true\n")
basedir1 = "test_client.Basic.test_create_magic_folder_service1"
os.mkdir(basedir1)
os.mkdir(local_dir_u)
# which config-entry should be missing?
fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
config + "local.directory = " + local_dir_utf8 + "\n")
with self.assertRaises(IOError):
yield client.create_client(basedir1)
# local.directory entry missing .. but that won't be an error
# now, it'll just assume there are not magic folders
# .. hrm...should we make that an error (if enabled=true but
# there's not yaml AND no local.directory?)
fileutil.write(os.path.join(basedir1, "tahoe.cfg"), config)
fileutil.write(os.path.join(basedir1, "private", "magic_folder_dircap"), "URI:DIR2:blah")
fileutil.write(os.path.join(basedir1, "private", "collective_dircap"), "URI:DIR2:meow")
fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
config.replace("[magic_folder]\n", "[drop_upload]\n"))
with self.assertRaises(OldConfigOptionError):
yield client.create_client(basedir1)
fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
config + "local.directory = " + local_dir_utf8 + "\n")
c1 = yield client.create_client(basedir1)
magicfolder = c1.getServiceNamed('magic-folder')
self.failUnless(isinstance(magicfolder, MockMagicFolder), magicfolder)
self.failUnlessReallyEqual(magicfolder.client, c1)
self.failUnlessReallyEqual(magicfolder.upload_dircap, upload_dircap)
self.failUnlessReallyEqual(os.path.basename(magicfolder.local_dir), local_dir_u)
self.failUnless(magicfolder.inotify is None, magicfolder.inotify)
# It doesn't start until the client starts.
self.assertFalse(magicfolder.running)
# See above.
boom = True
basedir2 = "test_client.Basic.test_create_magic_folder_service2"
os.mkdir(basedir2)
os.mkdir(os.path.join(basedir2, "private"))
fileutil.write(os.path.join(basedir2, "tahoe.cfg"),
BASECONFIG +
"[magic_folder]\n" +
"enabled = true\n" +
"local.directory = " + local_dir_utf8 + "\n")
fileutil.write(os.path.join(basedir2, "private", "magic_folder_dircap"), "URI:DIR2:blah")
fileutil.write(os.path.join(basedir2, "private", "collective_dircap"), "URI:DIR2:meow")
with self.assertRaises(Boom):
yield client.create_client(basedir2)
def flush_but_dont_ignore(res):
d = flushEventualQueue()

View File

@ -1,171 +0,0 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the inotify-alike implementation L{allmydata.watchdog}.
"""
# Note: See https://twistedmatrix.com/trac/ticket/8915 for a proposal
# to avoid all of this duplicated code from Twisted.
from twisted.internet import defer, reactor
from twisted.python import filepath, runtime
from allmydata.frontends.magic_folder import get_inotify_module
from .common import (
AsyncTestCase,
skipIf,
)
inotify = get_inotify_module()
@skipIf(runtime.platformType == "win32", "inotify does not yet work on windows")
class INotifyTests(AsyncTestCase):
"""
Define all the tests for the basic functionality exposed by
L{inotify.INotify}.
"""
def setUp(self):
self.dirname = filepath.FilePath(self.mktemp())
self.dirname.createDirectory()
self.inotify = inotify.INotify()
self.inotify.startReading()
self.addCleanup(self.inotify.stopReading)
return super(INotifyTests, self).setUp()
def _notificationTest(self, mask, operation, expectedPath=None):
"""
Test notification from some filesystem operation.
@param mask: The event mask to use when setting up the watch.
@param operation: A function which will be called with the
name of a file in the watched directory and which should
trigger the event.
@param expectedPath: Optionally, the name of the path which is
expected to come back in the notification event; this will
also be passed to C{operation} (primarily useful when the
operation is being done to the directory itself, not a
file in it).
@return: A L{Deferred} which fires successfully when the
expected event has been received or fails otherwise.
"""
if expectedPath is None:
expectedPath = self.dirname.child("foo.bar")
notified = defer.Deferred()
def cbNotified(result):
(watch, filename, events) = result
self.assertEqual(filename.asBytesMode(), expectedPath.asBytesMode())
self.assertTrue(events & mask)
self.inotify.ignore(self.dirname)
notified.addCallback(cbNotified)
def notify_event(*args):
notified.callback(args)
self.inotify.watch(
self.dirname, mask=mask,
callbacks=[notify_event])
operation(expectedPath)
return notified
def test_modify(self):
"""
Writing to a file in a monitored directory sends an
C{inotify.IN_MODIFY} event to the callback.
"""
def operation(path):
with path.open("w") as fObj:
fObj.write(b'foo')
return self._notificationTest(inotify.IN_MODIFY, operation)
def test_attrib(self):
"""
Changing the metadata of a file in a monitored directory
sends an C{inotify.IN_ATTRIB} event to the callback.
"""
def operation(path):
# Create the file.
path.touch()
# Modify the file's attributes.
path.touch()
return self._notificationTest(inotify.IN_ATTRIB, operation)
def test_closeWrite(self):
"""
Closing a file which was open for writing in a monitored
directory sends an C{inotify.IN_CLOSE_WRITE} event to the
callback.
"""
def operation(path):
path.open("w").close()
return self._notificationTest(inotify.IN_CLOSE_WRITE, operation)
def test_delete(self):
"""
Deleting a file in a monitored directory sends an
C{inotify.IN_DELETE} event to the callback.
"""
expectedPath = self.dirname.child("foo.bar")
expectedPath.touch()
notified = defer.Deferred()
def cbNotified(result):
(watch, filename, events) = result
self.assertEqual(filename.asBytesMode(), expectedPath.asBytesMode())
self.assertTrue(events & inotify.IN_DELETE)
notified.addCallback(cbNotified)
self.inotify.watch(
self.dirname, mask=inotify.IN_DELETE,
callbacks=[lambda *args: notified.callback(args)])
expectedPath.remove()
return notified
def test_humanReadableMask(self):
"""
L{inotify.humanReadableMask} translates all the possible event masks to a
human readable string.
"""
for mask, value in inotify._FLAG_TO_HUMAN:
self.assertEqual(inotify.humanReadableMask(mask)[0], value)
checkMask = (
inotify.IN_CLOSE_WRITE | inotify.IN_ACCESS | inotify.IN_OPEN)
self.assertEqual(
set(inotify.humanReadableMask(checkMask)),
set(['close_write', 'access', 'open']))
def test_noAutoAddSubdirectory(self):
"""
L{inotify.INotify.watch} with autoAdd==False will stop inotify
from watching subdirectories created under the watched one.
"""
def _callback(wp, fp, mask):
# We are notified before we actually process new
# directories, so we need to defer this check.
def _():
try:
self.assertFalse(self.inotify._isWatched(subdir))
d.callback(None)
except Exception:
d.errback()
reactor.callLater(0, _)
checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
self.inotify.watch(
self.dirname, mask=checkMask, autoAdd=False,
callbacks=[_callback])
subdir = self.dirname.child('test')
d = defer.Deferred()
subdir.createDirectory()
return d

File diff suppressed because it is too large Load Diff

View File

@ -1,28 +0,0 @@
from twisted.trial import unittest
from allmydata import magicpath
class MagicPath(unittest.TestCase):
    """
    Tests for the magicpath helpers that mangle relative paths into
    magic-folder filenames (and back), and the ignore-file predicate.
    """
    # plain relative path -> mangled magic-folder name
    tests = {
        u"Documents/work/critical-project/qed.txt": u"Documents@_work@_critical-project@_qed.txt",
        u"Documents/emails/bunnyfufu@hoppingforest.net": u"Documents@_emails@_bunnyfufu@@hoppingforest.net",
        u"foo/@/bar": u"foo@_@@@_bar",
    }

    def test_path2magic(self):
        for plain, mangled in self.tests.items():
            self.failUnlessEqual(magicpath.path2magic(plain), mangled)

    def test_magic2path(self):
        for plain, mangled in self.tests.items():
            self.failUnlessEqual(magicpath.magic2path(mangled), plain)

    def test_should_ignore(self):
        # Any path component starting with a dot makes the file ignored.
        cases = [
            (u".bashrc", True),
            (u"bashrc.", False),
            (u"forest/tree/branch/.bashrc", True),
            (u"forest/tree/.branch/bashrc", True),
            (u"forest/.tree/branch/bashrc", True),
            (u"forest/tree/branch/bashrc", False),
        ]
        for path_u, expected in cases:
            self.failUnlessEqual(magicpath.should_ignore_file(path_u), expected)

View File

@ -15,9 +15,6 @@ from testtools.matchers import (
BLACKLIST = {
"allmydata.test.check_load",
"allmydata.watchdog._watchdog_541",
"allmydata.watchdog.inotify",
"allmydata.windows.inotify",
"allmydata.windows.registry",
}

View File

@ -40,7 +40,7 @@ class TestStreamingLogs(unittest.TestCase):
messages.append(json.loads(msg))
proto.on("message", got_message)
@log_call(action_type=u"test:cli:magic-folder:cleanup")
@log_call(action_type=u"test:cli:some-exciting-action")
def do_a_thing():
pass

View File

@ -35,7 +35,6 @@ from allmydata.immutable import upload
from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
from allmydata.frontends.magic_folder import QueuedItem
from allmydata.web import status
from allmydata.web.common import WebError, MultiFormatPage
from allmydata.util import fileutil, base32, hashutil
@ -125,29 +124,6 @@ class FakeUploader(service.Service):
return (self.helper_furl, self.helper_connected)
def create_test_queued_item(relpath_u, history=[]):
    """
    Build a QueuedItem for *relpath_u* at 100% progress and size 1234,
    replaying the given (status, timestamp) pairs into its status history.

    NOTE(review): mutable default ``history=[]`` is tolerable only because
    it is never mutated here.
    """
    progress = mock.Mock()
    progress.progress = 100.0
    # 1234 is the fake size; tests assert on it in the serialized JSON.
    item = QueuedItem(relpath_u, progress, 1234)
    for the_status, timestamp in history:
        item.set_status(the_status, current_time=timestamp)
    return item
class FakeMagicFolder(object):
    """
    Minimal magic-folder stand-in: exposes uploader/downloader status
    objects and a fixed, healthy public status.
    """
    def __init__(self):
        self.uploader = FakeStatus()
        self.downloader = FakeStatus()

    def get_public_status(self):
        # Always report "working" with a single canned message.
        messages = ['a magic-folder status message']
        return (True, messages)
def build_one_ds():
ds = DownloadStatus("storage_index", 1234)
now = time.time()
@ -282,7 +258,6 @@ class FakeClient(_Client):
# don't upcall to Client.__init__, since we only want to initialize a
# minimal subset
service.MultiService.__init__(self)
self._magic_folders = dict()
self.all_contents = {}
self.nodeid = "fake_nodeid"
self.nickname = u"fake_nickname \u263A"
@ -997,79 +972,6 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
d.addCallback(_check)
return d
@defer.inlineCallbacks
def test_magicfolder_status_bad_token(self):
    """
    POSTing to /magic_folder with a wrong auth token raises a web Error.
    """
    with self.assertRaises(Error):
        yield self.POST(
            '/magic_folder?t=json',
            t='json',
            name='default',
            token='not the token you are looking for',
        )
@defer.inlineCallbacks
def test_magicfolder_status_wrong_folder(self):
    """
    Requesting status with a valid token but an unknown magic-folder
    name produces a "Not Found" error.
    """
    with self.assertRaises(Exception) as ctx:
        yield self.POST(
            '/magic_folder?t=json',
            t='json',
            name='a non-existent magic-folder',
            token=self.s.get_auth_token(),
        )
    self.assertIn(
        "Not Found",
        str(ctx.exception)
    )
@defer.inlineCallbacks
def test_magicfolder_status_success(self):
    """
    With a valid token and an existing folder name, the status POST
    returns a JSON list describing queued uploads then downloads.
    """
    self.s._magic_folders['default'] = mf = FakeMagicFolder()
    mf.uploader.status = [
        create_test_queued_item(u"rel/uppath", [('done', 12345)])
    ]
    mf.downloader.status = [
        create_test_queued_item(u"rel/downpath", [('done', 23456)])
    ]
    data = yield self.POST(
        '/magic_folder?t=json',
        t='json',
        name='default',
        token=self.s.get_auth_token(),
    )
    data = json.loads(data)
    # size=1234 and percent_done=100.0 come from create_test_queued_item;
    # upload entries are serialized before download entries.
    self.assertEqual(
        data,
        [
            {
                "status": "done",
                "path": "rel/uppath",
                "kind": "upload",
                "percent_done": 100.0,
                "done_at": 12345,
                "size": 1234,
            },
            {
                "status": "done",
                "path": "rel/downpath",
                "kind": "download",
                "percent_done": 100.0,
                "done_at": 23456,
                "size": 1234,
            },
        ]
    )
@defer.inlineCallbacks
def test_magicfolder_root_success(self):
    """
    Smoke test: the root page renders without error while a magic-folder
    with upload status exists (the response body is not inspected).
    """
    self.s._magic_folders['default'] = mf = FakeMagicFolder()
    mf.uploader.status = [
        create_test_queued_item(u"rel/path", [('done', 12345)])
    ]
    data = yield self.GET(
        '/',
    )
    del data
def test_status(self):
h = self.s.get_history()
dl_num = h.list_all_download_statuses()[0].get_counter()

View File

@ -16,15 +16,6 @@ __all__ = [
"opt_help_eliot_destinations",
"validateInstanceOf",
"validateSetMembership",
"MAYBE_NOTIFY",
"CALLBACK",
"INOTIFY_EVENTS",
"RELPATH",
"VERSION",
"LAST_UPLOADED_URI",
"LAST_DOWNLOADED_URI",
"LAST_DOWNLOADED_TIMESTAMP",
"PATHINFO",
]
from sys import (
@ -86,14 +77,6 @@ from twisted.internet.defer import (
)
from twisted.application.service import Service
from .fileutil import (
PathInfo,
)
from .fake_inotify import (
humanReadableMask,
)
def validateInstanceOf(t):
"""
Return an Eliot validator that requires values to be instances of ``t``.
@ -112,72 +95,6 @@ def validateSetMembership(s):
raise ValidationError("{} not in {}".format(v, s))
return validator
RELPATH = Field.for_types(
u"relpath",
[unicode],
u"The relative path of a file in a magic-folder.",
)
VERSION = Field.for_types(
u"version",
[int, long],
u"The version of the file.",
)
LAST_UPLOADED_URI = Field.for_types(
u"last_uploaded_uri",
[unicode, bytes, None],
u"The filecap to which this version of this file was uploaded.",
)
LAST_DOWNLOADED_URI = Field.for_types(
u"last_downloaded_uri",
[unicode, bytes, None],
u"The filecap from which the previous version of this file was downloaded.",
)
LAST_DOWNLOADED_TIMESTAMP = Field.for_types(
u"last_downloaded_timestamp",
[float, int, long],
u"(XXX probably not really, don't trust this) The timestamp of the last download of this file.",
)
PATHINFO = Field(
u"pathinfo",
lambda v: None if v is None else {
"isdir": v.isdir,
"isfile": v.isfile,
"islink": v.islink,
"exists": v.exists,
"size": v.size,
"mtime_ns": v.mtime_ns,
"ctime_ns": v.ctime_ns,
},
u"The metadata for this version of this file.",
validateInstanceOf((type(None), PathInfo)),
)
INOTIFY_EVENTS = Field(
u"inotify_events",
humanReadableMask,
u"Details about a filesystem event generating a notification event.",
validateInstanceOf((int, long)),
)
MAYBE_NOTIFY = ActionType(
u"filesystem:notification:maybe-notify",
[],
[],
u"A filesystem event is being considered for dispatch to an application handler.",
)
CALLBACK = ActionType(
u"filesystem:notification:callback",
[INOTIFY_EVENTS],
[],
u"A filesystem event is being dispatched to an application callback."
)
def eliot_logging_service(reactor, destinations):
"""
Parse the given Eliot destination descriptions and return an ``IService``

View File

@ -1,109 +0,0 @@
# Most of this is copied from Twisted 11.0. The reason for this hack is that
# twisted.internet.inotify can't be imported when the platform does not support inotify.
import six
if six.PY3:
long = int
# from /usr/src/linux/include/linux/inotify.h
IN_ACCESS = long(0x00000001) # File was accessed
IN_MODIFY = long(0x00000002) # File was modified
IN_ATTRIB = long(0x00000004) # Metadata changed
IN_CLOSE_WRITE = long(0x00000008) # Writeable file was closed
IN_CLOSE_NOWRITE = long(0x00000010) # Unwriteable file closed
IN_OPEN = long(0x00000020) # File was opened
IN_MOVED_FROM = long(0x00000040) # File was moved from X
IN_MOVED_TO = long(0x00000080) # File was moved to Y
IN_CREATE = long(0x00000100) # Subfile was created
IN_DELETE = long(0x00000200) # Subfile was delete
IN_DELETE_SELF = long(0x00000400) # Self was deleted
IN_MOVE_SELF = long(0x00000800) # Self was moved
IN_UNMOUNT = long(0x00002000) # Backing fs was unmounted
IN_Q_OVERFLOW = long(0x00004000) # Event queued overflowed
IN_IGNORED = long(0x00008000) # File was ignored
IN_ONLYDIR = 0x01000000 # only watch the path if it is a directory
IN_DONT_FOLLOW = 0x02000000 # don't follow a sym link
IN_MASK_ADD = 0x20000000 # add to the mask of an already existing watch
IN_ISDIR = 0x40000000 # event occurred against dir
IN_ONESHOT = 0x80000000 # only send event once
IN_CLOSE = IN_CLOSE_WRITE | IN_CLOSE_NOWRITE # closes
IN_MOVED = IN_MOVED_FROM | IN_MOVED_TO # moves
IN_CHANGED = IN_MODIFY | IN_ATTRIB # changes
IN_WATCH_MASK = (IN_MODIFY | IN_ATTRIB |
IN_CREATE | IN_DELETE |
IN_DELETE_SELF | IN_MOVE_SELF |
IN_UNMOUNT | IN_MOVED_FROM | IN_MOVED_TO)
_FLAG_TO_HUMAN = [
(IN_ACCESS, 'access'),
(IN_MODIFY, 'modify'),
(IN_ATTRIB, 'attrib'),
(IN_CLOSE_WRITE, 'close_write'),
(IN_CLOSE_NOWRITE, 'close_nowrite'),
(IN_OPEN, 'open'),
(IN_MOVED_FROM, 'moved_from'),
(IN_MOVED_TO, 'moved_to'),
(IN_CREATE, 'create'),
(IN_DELETE, 'delete'),
(IN_DELETE_SELF, 'delete_self'),
(IN_MOVE_SELF, 'move_self'),
(IN_UNMOUNT, 'unmount'),
(IN_Q_OVERFLOW, 'queue_overflow'),
(IN_IGNORED, 'ignored'),
(IN_ONLYDIR, 'only_dir'),
(IN_DONT_FOLLOW, 'dont_follow'),
(IN_MASK_ADD, 'mask_add'),
(IN_ISDIR, 'is_dir'),
(IN_ONESHOT, 'one_shot')
]
def humanReadableMask(mask):
    """
    Auxiliary function that converts an hexadecimal mask into a series
    of human readable flags.
    """
    # Collect the name of every flag present in the mask, in table order.
    return [name for flag, name in _FLAG_TO_HUMAN if flag & mask]
from eliot import start_action
# This class is not copied from Twisted; it acts as a mock.
class INotify(object):
    """
    In-memory INotify double: remembers the callbacks passed to watch()
    and replays synthetic events to them on demand via event().
    """
    def startReading(self):
        pass

    def stopReading(self):
        pass

    def loseConnection(self):
        pass

    def watch(self, filepath, mask=IN_WATCH_MASK, autoAdd=False, callbacks=None, recursive=False):
        # Only the callbacks matter for the tests using this double.
        self.callbacks = callbacks

    def event(self, filepath, mask):
        with start_action(action_type=u"fake-inotify:event", path=filepath.asTextMode().path, mask=mask):
            for callback in self.callbacks:
                callback(None, filepath, mask)
__all__ = ["INotify", "humanReadableMask", "IN_WATCH_MASK", "IN_ACCESS",
"IN_MODIFY", "IN_ATTRIB", "IN_CLOSE_NOWRITE", "IN_CLOSE_WRITE",
"IN_OPEN", "IN_MOVED_FROM", "IN_MOVED_TO", "IN_CREATE",
"IN_DELETE", "IN_DELETE_SELF", "IN_MOVE_SELF", "IN_UNMOUNT",
"IN_Q_OVERFLOW", "IN_IGNORED", "IN_ONLYDIR", "IN_DONT_FOLLOW",
"IN_MASK_ADD", "IN_ISDIR", "IN_ONESHOT", "IN_CLOSE",
"IN_MOVED", "IN_CHANGED"]

View File

@ -1,16 +0,0 @@
"""
Hotfix for https://github.com/gorakhargosh/watchdog/issues/541
"""
from watchdog.observers.fsevents import FSEventsEmitter
# The class object has already been bundled up in the default arguments to
# FSEventsObserver.__init__. So mutate the class object (instead of replacing
# it with a safer version).
# Keep a reference to the unpatched implementation so the safe wrapper
# can delegate to it.
original_on_thread_stop = FSEventsEmitter.on_thread_stop
def safe_on_thread_stop(self):
    # watchdog issue #541: on_thread_stop can be called after the emitter
    # thread has already exited; only delegate while the thread is alive.
    if self.is_alive():
        return original_on_thread_stop(self)
def patch():
    """Monkey-patch the crash-guarded on_thread_stop onto FSEventsEmitter."""
    FSEventsEmitter.on_thread_stop = safe_on_thread_stop

View File

@ -1,212 +0,0 @@
"""
An implementation of an inotify-like interface on top of the ``watchdog`` library.
"""
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division,
)
__all__ = [
"humanReadableMask", "INotify",
"IN_WATCH_MASK", "IN_ACCESS", "IN_MODIFY", "IN_ATTRIB", "IN_CLOSE_NOWRITE",
"IN_CLOSE_WRITE", "IN_OPEN", "IN_MOVED_FROM", "IN_MOVED_TO", "IN_CREATE",
"IN_DELETE", "IN_DELETE_SELF", "IN_MOVE_SELF", "IN_UNMOUNT", "IN_ONESHOT",
"IN_Q_OVERFLOW", "IN_IGNORED", "IN_ONLYDIR", "IN_DONT_FOLLOW", "IN_MOVED",
"IN_MASK_ADD", "IN_ISDIR", "IN_CLOSE", "IN_CHANGED", "_FLAG_TO_HUMAN",
]
from watchdog.observers import Observer
from watchdog.events import (
FileSystemEvent,
FileSystemEventHandler, DirCreatedEvent, FileCreatedEvent,
DirDeletedEvent, FileDeletedEvent, FileModifiedEvent
)
from twisted.internet import reactor
from twisted.python.filepath import FilePath
from allmydata.util.fileutil import abspath_expanduser_unicode
from eliot import (
ActionType,
Message,
Field,
preserve_context,
start_action,
)
from allmydata.util.pollmixin import PollMixin
from allmydata.util.assertutil import _assert, precondition
from allmydata.util import encodingutil
from allmydata.util.fake_inotify import humanReadableMask, \
IN_WATCH_MASK, IN_ACCESS, IN_MODIFY, IN_ATTRIB, IN_CLOSE_NOWRITE, IN_CLOSE_WRITE, \
IN_OPEN, IN_MOVED_FROM, IN_MOVED_TO, IN_CREATE, IN_DELETE, IN_DELETE_SELF, \
IN_MOVE_SELF, IN_UNMOUNT, IN_Q_OVERFLOW, IN_IGNORED, IN_ONLYDIR, IN_DONT_FOLLOW, \
IN_MASK_ADD, IN_ISDIR, IN_ONESHOT, IN_CLOSE, IN_MOVED, IN_CHANGED, \
_FLAG_TO_HUMAN
from ..util.eliotutil import (
MAYBE_NOTIFY,
CALLBACK,
validateInstanceOf,
)
from . import _watchdog_541
_watchdog_541.patch()
NOT_STARTED = "NOT_STARTED"
STARTED = "STARTED"
STOPPING = "STOPPING"
STOPPED = "STOPPED"
_PATH = Field.for_types(
u"path",
[bytes, unicode],
u"The path an inotify event concerns.",
)
_EVENT = Field(
u"event",
lambda e: e.__class__.__name__,
u"The watchdog event that has taken place.",
validateInstanceOf(FileSystemEvent),
)
ANY_INOTIFY_EVENT = ActionType(
u"watchdog:inotify:any-event",
[_PATH, _EVENT],
[],
u"An inotify event is being dispatched.",
)
class INotifyEventHandler(FileSystemEventHandler):
    """
    Adapt watchdog FileSystemEvents to inotify-style callbacks: build an
    inotify event mask for each event and, when it intersects the watch
    mask, invoke every registered callback as cb(None, FilePath, mask).
    """
    def __init__(self, path, mask, callbacks, pending_delay):
        # path: absolute unicode path of the watched directory
        # mask: inotify-style bitmask of events the watcher cares about
        # callbacks: callables invoked as cb(None, FilePath(path), event_mask)
        # pending_delay: stored but not referenced in this class — TODO confirm use
        FileSystemEventHandler.__init__(self)
        self._path = path
        self._mask = mask
        self._callbacks = callbacks
        self._pending_delay = pending_delay
        self._pending = set()

    def _maybe_notify(self, path, event):
        with MAYBE_NOTIFY():
            # Translate the watchdog event class into inotify mask bits.
            event_mask = IN_CHANGED
            if isinstance(event, FileModifiedEvent):
                event_mask = event_mask | IN_CLOSE_WRITE
                event_mask = event_mask | IN_MODIFY
            if isinstance(event, (DirCreatedEvent, FileCreatedEvent)):
                # For our purposes, IN_CREATE is irrelevant.
                event_mask = event_mask | IN_CLOSE_WRITE
            if isinstance(event, (DirDeletedEvent, FileDeletedEvent)):
                event_mask = event_mask | IN_DELETE
            if event.is_directory:
                event_mask = event_mask | IN_ISDIR
            # Drop events the watch mask does not select.
            if not (self._mask & event_mask):
                return
            for cb in self._callbacks:
                try:
                    with CALLBACK(inotify_events=event_mask):
                        cb(None, FilePath(path), event_mask)
                except:
                    # Eliot already logged the exception for us.
                    # There's nothing else we can do about it here.
                    pass

    def process(self, event):
        # Normalize the event path to an absolute unicode path.
        event_filepath_u = event.src_path.decode(encodingutil.get_filesystem_encoding())
        event_filepath_u = abspath_expanduser_unicode(event_filepath_u, base=self._path)
        if event_filepath_u == self._path:
            # ignore events for parent directory
            return
        self._maybe_notify(event_filepath_u, event)

    def on_any_event(self, event):
        # Watchdog calls this from its observer thread; hop over to the
        # reactor thread (preserving the Eliot context) before processing.
        with ANY_INOTIFY_EVENT(path=event.src_path, event=event):
            reactor.callFromThread(
                preserve_context(self.process),
                event,
            )
class INotify(PollMixin):
    """
    I am a prototype INotify, made to work on Mac OS X (Darwin)
    using the Watchdog python library. This is actually a simplified subset
    of the twisted Linux INotify class because we do not utilize the watch mask
    and only implement the following methods:
     - watch
     - startReading
     - stopReading
     - wait_until_stopped
     - set_pending_delay
    """
    def __init__(self):
        self._pending_delay = 1.0
        self.recursive_includes_new_subdirectories = False
        # path -> list of callbacks; path -> watchdog watch handle
        self._callbacks = {}
        self._watches = {}
        # Lifecycle: NOT_STARTED -> STARTED -> (STOPPING) -> STOPPED
        self._state = NOT_STARTED
        self._observer = Observer(timeout=self._pending_delay)

    def set_pending_delay(self, delay):
        # May only be tuned before the observer thread starts.
        Message.log(message_type=u"watchdog:inotify:set-pending-delay", delay=delay)
        assert self._state != STARTED
        self._pending_delay = delay

    def startReading(self):
        """Start the watchdog observer thread."""
        with start_action(action_type=u"watchdog:inotify:start-reading"):
            assert self._state != STARTED
            try:
                # XXX twisted.internet.inotify doesn't require watches to
                # be set before startReading is called.
                # _assert(len(self._callbacks) != 0, "no watch set")
                self._observer.start()
                self._state = STARTED
            except:
                self._state = STOPPED
                raise

    def stopReading(self):
        """Unschedule all watches and join the observer thread."""
        with start_action(action_type=u"watchdog:inotify:stop-reading"):
            if self._state != STOPPED:
                self._state = STOPPING
            self._observer.unschedule_all()
            self._observer.stop()
            # join() blocks until the observer thread has exited.
            self._observer.join()
            self._state = STOPPED

    def wait_until_stopped(self):
        # Poll (from PollMixin) until stopReading has completed.
        return self.poll(lambda: self._state == STOPPED)

    def _isWatched(self, path_u):
        return path_u in self._callbacks.keys()

    def ignore(self, path):
        """Stop watching *path* (a FilePath) and forget its callbacks."""
        path_u = path.path
        self._observer.unschedule(self._watches[path_u])
        del self._callbacks[path_u]
        del self._watches[path_u]

    def watch(self, path, mask=IN_WATCH_MASK, autoAdd=False, callbacks=None, recursive=False):
        """
        Register callbacks for events under *path*. autoAdd is not
        supported by this backend; repeated calls for the same path
        are ignored.
        """
        precondition(isinstance(autoAdd, bool), autoAdd=autoAdd)
        precondition(isinstance(recursive, bool), recursive=recursive)
        assert autoAdd == False
        path_u = path.path
        if not isinstance(path_u, unicode):
            path_u = path_u.decode('utf-8')
            _assert(isinstance(path_u, unicode), path_u=path_u)
        if path_u not in self._callbacks.keys():
            self._callbacks[path_u] = callbacks or []
            self._watches[path_u] = self._observer.schedule(
                INotifyEventHandler(path_u, mask, self._callbacks[path_u], self._pending_delay),
                path=path_u,
                recursive=False,
            )

View File

@ -1,52 +0,0 @@
import json
from allmydata.web.common import TokenOnlyWebApi, get_arg, WebError
class MagicFolderWebApi(TokenOnlyWebApi):
    """
    I provide the web-based API for Magic Folder status etc.
    """
    def __init__(self, client):
        TokenOnlyWebApi.__init__(self, client)
        self.client = client

    @staticmethod
    def _item_to_json(item, kind):
        """
        Serialize one queued upload/download item to a JSON-able dict.

        Deduplicates the previously copy-pasted upload/download loops:
        emits path, latest status, kind, one '<status>_at' timestamp per
        history entry, percent_done and size.
        """
        d = dict(
            path=item.relpath_u,
            status=item.status_history()[-1][0],
            kind=kind,
        )
        for (status, ts) in item.status_history():
            d[status + '_at'] = ts
        d['percent_done'] = item.progress.progress
        d['size'] = item.size
        return d

    def post_json(self, req):
        """
        Return the JSON status list for the magic-folder named by the
        ``name`` argument (default ``'default'``); uploads first, then
        downloads.  Raises WebError(404) for an unknown folder name.
        """
        req.setHeader("content-type", "application/json")
        nick = get_arg(req, 'name', 'default')
        try:
            magic_folder = self.client._magic_folders[nick]
        except KeyError:
            raise WebError(
                "No such magic-folder '{}'".format(nick),
                404,
            )
        data = [
            self._item_to_json(item, 'upload')
            for item in magic_folder.uploader.get_status()
        ]
        data.extend(
            self._item_to_json(item, 'download')
            for item in magic_folder.downloader.get_status()
        )
        return json.dumps(data)

View File

@ -21,7 +21,7 @@ from allmydata.version_checks import get_package_versions_string
from allmydata.util import log
from allmydata.interfaces import IFileNode
from allmydata.web import filenode, directory, unlinked, status
from allmydata.web import storage, magic_folder
from allmydata.web import storage
from allmydata.web.common import (
abbreviate_size,
getxmlfile,
@ -200,9 +200,6 @@ class Root(MultiFormatPage):
self.putChild("uri", URIHandler(client))
self.putChild("cap", URIHandler(client))
# handler for "/magic_folder" URIs
self.putChild("magic_folder", magic_folder.MagicFolderWebApi(client))
# Handler for everything beneath "/private", an area of the resource
# hierarchy which is only accessible with the private per-node API
# auth token.
@ -299,30 +296,6 @@ class Root(MultiFormatPage):
return description
def data_magic_folders(self, ctx, data):
return self.client._magic_folders.keys()
def render_magic_folder_row(self, ctx, data):
magic_folder = self.client._magic_folders[data]
(ok, messages) = magic_folder.get_public_status()
ctx.fillSlots("magic_folder_name", data)
if ok:
ctx.fillSlots("magic_folder_status", "yes")
ctx.fillSlots("magic_folder_status_alt", "working")
else:
ctx.fillSlots("magic_folder_status", "no")
ctx.fillSlots("magic_folder_status_alt", "not working")
status = T.ul(class_="magic-folder-status")
for msg in messages:
status[T.li[str(msg)]]
return ctx.tag[status]
def render_magic_folder(self, ctx, data):
if not self.client._magic_folders:
return T.p()
return ctx.tag
def render_services(self, ctx, data):
ul = T.ul()
try:

View File

@ -53,11 +53,6 @@ body {
.connection-status {
}
.magic-folder-status {
clear: left;
margin-left: 40px; /* width of status-indicator + margins */
}
.furl {
font-size: 0.8em;
word-wrap: break-word;

View File

@ -20,18 +20,6 @@
<li>Files Retrieved (mutable): <span n:render="retrieves" /></li>
</ul>
<h2>Magic Folder</h2>
<ul>
<li>Local Directories Monitored: <span n:render="magic_uploader_monitored" /></li>
<li>Files Uploaded: <span n:render="magic_uploader_succeeded" /></li>
<li>Files Queued for Upload: <span n:render="magic_uploader_queued" /></li>
<li>Failed Uploads: <span n:render="magic_uploader_failed" /></li>
<li>Files Downloaded: <span n:render="magic_downloader_succeeded" /></li>
<li>Files Queued for Download: <span n:render="magic_downloader_queued" /></li>
<li>Failed Downloads: <span n:render="magic_downloader_failed" /></li>
</ul>
<h2>Raw Stats:</h2>
<pre n:render="raw" />

View File

@ -1210,36 +1210,6 @@ class Statistics(MultiFormatPage):
return "%s files / %s bytes (%s)" % (files, bytes,
abbreviate_size(bytes))
def render_magic_uploader_monitored(self, ctx, data):
dirs = data["counters"].get("magic_folder.uploader.dirs_monitored", 0)
return "%s directories" % (dirs,)
def render_magic_uploader_succeeded(self, ctx, data):
# TODO: bytes uploaded
files = data["counters"].get("magic_folder.uploader.objects_succeeded", 0)
return "%s files" % (files,)
def render_magic_uploader_queued(self, ctx, data):
files = data["counters"].get("magic_folder.uploader.objects_queued", 0)
return "%s files" % (files,)
def render_magic_uploader_failed(self, ctx, data):
files = data["counters"].get("magic_folder.uploader.objects_failed", 0)
return "%s files" % (files,)
def render_magic_downloader_succeeded(self, ctx, data):
# TODO: bytes uploaded
files = data["counters"].get("magic_folder.downloader.objects_succeeded", 0)
return "%s files" % (files,)
def render_magic_downloader_queued(self, ctx, data):
files = data["counters"].get("magic_folder.downloader.objects_queued", 0)
return "%s files" % (files,)
def render_magic_downloader_failed(self, ctx, data):
files = data["counters"].get("magic_folder.downloader.objects_failed", 0)
return "%s files" % (files,)
def render_raw(self, ctx, data):
raw = pprint.pformat(data)
return ctx.tag[raw]

View File

@ -159,13 +159,6 @@
</div><!--/row-->
</div>
<div n:render="magic_folder" class="row-fluid">
<h2>Magic Folders</h2>
<div n:render="sequence" n:data="magic_folders">
<div n:pattern="item" n:render="magic_folder_row"><div class="status-indicator"><img><n:attr name="src">img/connected-<n:slot name="magic_folder_status" />.png</n:attr><n:attr name="alt"><n:slot name="magic_folder_status_alt" /></n:attr></img></div><h3><n:slot name="magic_folder_name" /></h3></div>
</div>
</div><!--/row-->
<div class="row-fluid">
<h2>
Connected to <span n:render="string" n:data="connected_storage_servers" />

View File

@ -1,379 +0,0 @@
# Windows near-equivalent to twisted.internet.inotify
# This should only be imported on Windows.
from __future__ import print_function
import six
import os, sys
from eliot import (
start_action,
Message,
log_call,
)
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
from allmydata.util.fake_inotify import humanReadableMask, \
IN_WATCH_MASK, IN_ACCESS, IN_MODIFY, IN_ATTRIB, IN_CLOSE_NOWRITE, IN_CLOSE_WRITE, \
IN_OPEN, IN_MOVED_FROM, IN_MOVED_TO, IN_CREATE, IN_DELETE, IN_DELETE_SELF, \
IN_MOVE_SELF, IN_UNMOUNT, IN_Q_OVERFLOW, IN_IGNORED, IN_ONLYDIR, IN_DONT_FOLLOW, \
IN_MASK_ADD, IN_ISDIR, IN_ONESHOT, IN_CLOSE, IN_MOVED, IN_CHANGED
[humanReadableMask, \
IN_WATCH_MASK, IN_ACCESS, IN_MODIFY, IN_ATTRIB, IN_CLOSE_NOWRITE, IN_CLOSE_WRITE, \
IN_OPEN, IN_MOVED_FROM, IN_MOVED_TO, IN_CREATE, IN_DELETE, IN_DELETE_SELF, \
IN_MOVE_SELF, IN_UNMOUNT, IN_Q_OVERFLOW, IN_IGNORED, IN_ONLYDIR, IN_DONT_FOLLOW, \
IN_MASK_ADD, IN_ISDIR, IN_ONESHOT, IN_CLOSE, IN_MOVED, IN_CHANGED]
from allmydata.util.assertutil import _assert, precondition
from allmydata.util.encodingutil import quote_output
from allmydata.util import log, fileutil
from allmydata.util.pollmixin import PollMixin
from ..util.eliotutil import (
MAYBE_NOTIFY,
CALLBACK,
)
from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, create_string_buffer, \
addressof, get_last_error
from ctypes.wintypes import BOOL, HANDLE, DWORD, LPCWSTR, LPVOID
if six.PY3:
long = int
# <http://msdn.microsoft.com/en-us/library/gg258116%28v=vs.85%29.aspx>
FILE_LIST_DIRECTORY = 1
# <http://msdn.microsoft.com/en-us/library/aa363858%28v=vs.85%29.aspx>
CreateFileW = WINFUNCTYPE(
HANDLE, LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE,
use_last_error=True
)(("CreateFileW", windll.kernel32))
FILE_SHARE_READ = 0x00000001
FILE_SHARE_WRITE = 0x00000002
FILE_SHARE_DELETE = 0x00000004
OPEN_EXISTING = 3
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
# <http://msdn.microsoft.com/en-us/library/ms724211%28v=vs.85%29.aspx>
CloseHandle = WINFUNCTYPE(
BOOL, HANDLE,
use_last_error=True
)(("CloseHandle", windll.kernel32))
# <http://msdn.microsoft.com/en-us/library/aa365465%28v=vs.85%29.aspx>
ReadDirectoryChangesW = WINFUNCTYPE(
BOOL, HANDLE, LPVOID, DWORD, BOOL, DWORD, POINTER(DWORD), LPVOID, LPVOID,
use_last_error=True
)(("ReadDirectoryChangesW", windll.kernel32))
FILE_NOTIFY_CHANGE_FILE_NAME = 0x00000001
FILE_NOTIFY_CHANGE_DIR_NAME = 0x00000002
FILE_NOTIFY_CHANGE_ATTRIBUTES = 0x00000004
#FILE_NOTIFY_CHANGE_SIZE = 0x00000008
FILE_NOTIFY_CHANGE_LAST_WRITE = 0x00000010
FILE_NOTIFY_CHANGE_LAST_ACCESS = 0x00000020
#FILE_NOTIFY_CHANGE_CREATION = 0x00000040
FILE_NOTIFY_CHANGE_SECURITY = 0x00000100
# <http://msdn.microsoft.com/en-us/library/aa364391%28v=vs.85%29.aspx>
FILE_ACTION_ADDED = 0x00000001
FILE_ACTION_REMOVED = 0x00000002
FILE_ACTION_MODIFIED = 0x00000003
FILE_ACTION_RENAMED_OLD_NAME = 0x00000004
FILE_ACTION_RENAMED_NEW_NAME = 0x00000005
_action_to_string = {
FILE_ACTION_ADDED : "FILE_ACTION_ADDED",
FILE_ACTION_REMOVED : "FILE_ACTION_REMOVED",
FILE_ACTION_MODIFIED : "FILE_ACTION_MODIFIED",
FILE_ACTION_RENAMED_OLD_NAME : "FILE_ACTION_RENAMED_OLD_NAME",
FILE_ACTION_RENAMED_NEW_NAME : "FILE_ACTION_RENAMED_NEW_NAME",
}
_action_to_inotify_mask = {
FILE_ACTION_ADDED : IN_CREATE,
FILE_ACTION_REMOVED : IN_DELETE,
FILE_ACTION_MODIFIED : IN_CHANGED,
FILE_ACTION_RENAMED_OLD_NAME : IN_MOVED_FROM,
FILE_ACTION_RENAMED_NEW_NAME : IN_MOVED_TO,
}
INVALID_HANDLE_VALUE = 0xFFFFFFFF
FALSE = 0
TRUE = 1
class Event(object):
    """
    A single ReadDirectoryChangesW notification.

    * action: a FILE_ACTION_* constant (not a bit mask)
    * filename: a Unicode string, giving the name relative to the watched directory
    """
    def __init__(self, action, filename):
        self.action = action
        self.filename = filename

    def __repr__(self):
        # Show the symbolic FILE_ACTION_* name when the code is known.
        label = _action_to_string.get(self.action, self.action)
        return "Event(%r, %r)" % (label, self.filename)
class FileNotifyInformation(object):
    """
    I represent a buffer containing FILE_NOTIFY_INFORMATION structures, and can
    iterate over those structures, decoding them into Event objects.
    """
    def __init__(self, size=1024):
        self.size = size
        self.buffer = create_string_buffer(size)
        address = addressof(self.buffer)
        # ReadDirectoryChangesW needs a DWORD-aligned output buffer.
        _assert(address & 3 == 0, "address 0x%X returned by create_string_buffer is not DWORD-aligned" % (address,))
        self.data = None

    def read_changes(self, hDirectory, recursive, filter):
        """Block in ReadDirectoryChangesW; capture the raw result bytes in self.data."""
        bytes_returned = DWORD(0)
        r = ReadDirectoryChangesW(hDirectory,
                                  self.buffer,
                                  self.size,
                                  recursive,
                                  filter,
                                  byref(bytes_returned),
                                  None, # NULL -> no overlapped I/O
                                  None  # NULL -> no completion routine
                                 )
        if r == 0:
            self.data = None
            raise WinError(get_last_error())
        self.data = self.buffer.raw[:bytes_returned.value]

    def __iter__(self):
        # Iterator implemented as generator: <http://docs.python.org/library/stdtypes.html#generator-types>
        if self.data is None:
            return
        pos = 0
        while True:
            # FILE_NOTIFY_INFORMATION layout: NextEntryOffset (DWORD at +0),
            # Action (DWORD at +4), FileNameLength in bytes (DWORD at +8),
            # UTF-16-LE FileName starting at +12.
            bytes = self._read_dword(pos+8)
            s = Event(self._read_dword(pos+4),
                      self.data[pos+12 : pos+12+bytes].decode('utf-16-le'))
            Message.log(message_type="fni", info=repr(s))
            next_entry_offset = self._read_dword(pos)
            yield s
            if next_entry_offset == 0:
                break
            pos = pos + next_entry_offset

    def _read_dword(self, i):
        # little-endian
        return ( ord(self.data[i])           |
                 (ord(self.data[i+1]) << 8)  |
                 (ord(self.data[i+2]) << 16) |
                 (ord(self.data[i+3]) << 24))
def _open_directory(path_u):
    """
    Open *path_u* as a Win32 directory handle suitable for
    ReadDirectoryChangesW; raise OSError (wrapping the WinError) on failure.
    """
    hDirectory = CreateFileW(path_u,
                             FILE_LIST_DIRECTORY, # access rights
                             FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
                             # don't prevent other processes from accessing
                             None, # no security descriptor
                             OPEN_EXISTING, # directory must already exist
                             FILE_FLAG_BACKUP_SEMANTICS, # necessary to open a directory
                             None # no template file
                            )
    if hDirectory == INVALID_HANDLE_VALUE:
        e = WinError(get_last_error())
        raise OSError("Opening directory %s gave WinError: %s" % (quote_output(path_u), e))
    return hDirectory
def simple_test():
    """Manual smoke test: print raw change notifications for ./test forever."""
    path_u = u"test"
    filter = FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE
    recursive = TRUE
    hDirectory = _open_directory(path_u)
    fni = FileNotifyInformation()
    print("Waiting...")
    while True:
        # Block until the OS reports changes, then dump raw and decoded events.
        fni.read_changes(hDirectory, recursive, filter)
        print(repr(fni.data))
        for info in fni:
            print(info)
def medium_test():
    """Manual smoke test: watch ./test via INotify and print each event."""
    from twisted.python.filepath import FilePath
    def print_(*event):
        print(event)
    notifier = INotify()
    notifier.set_pending_delay(1.0)
    # Inotify flag not exposed by fake_inotify; defined locally.
    IN_EXCL_UNLINK = long(0x04000000)
    mask = ( IN_CREATE
           | IN_CLOSE_WRITE
           | IN_MOVED_TO
           | IN_MOVED_FROM
           | IN_DELETE
           | IN_ONLYDIR
           | IN_EXCL_UNLINK
           )
    notifier.watch(FilePath(u"test"), mask, callbacks=[print_], recursive=True)
    notifier.startReading()
    reactor.run()
NOT_STARTED = "NOT_STARTED"
STARTED = "STARTED"
STOPPING = "STOPPING"
STOPPED = "STOPPED"
class INotify(PollMixin):
    """
    Windows near-equivalent of twisted.internet.inotify's INotify: runs a
    blocking ReadDirectoryChangesW loop on a background thread and
    dispatches events to callbacks via reactor.callFromThread.  Only a
    single watch is supported.
    """
    def __init__(self):
        # Lifecycle: NOT_STARTED -> STARTED -> (STOPPING) -> STOPPED
        self._state = NOT_STARTED
        self._filter = None          # ReadDirectoryChangesW filter bits, set by watch()
        self._callbacks = None
        self._hDirectory = None      # Win32 directory handle being watched
        self._path = None            # FilePath of the watched directory
        self._pending = set()        # paths noticed but not yet delivered
        self._pending_delay = 1.0
        self._pending_call = None
        self.recursive_includes_new_subdirectories = True

    def set_pending_delay(self, delay):
        self._pending_delay = delay

    def startReading(self):
        # Run the blocking read loop off the reactor thread; resolve once
        # the thread has moved past NOT_STARTED.
        deferToThread(self._thread)
        return self.poll(lambda: self._state != NOT_STARTED)

    def stopReading(self):
        # FIXME race conditions
        if self._state != STOPPED:
            self._state = STOPPING
        if self._pending_call:
            self._pending_call.cancel()
            self._pending_call = None

    def wait_until_stopped(self):
        # Touch a file inside the watched directory so the blocking
        # ReadDirectoryChangesW call returns and the thread can observe
        # the STOPPING state.
        try:
            fileutil.write(os.path.join(self._path.path, u".ignore-me"), "")
        except IOError:
            pass
        return self.poll(lambda: self._state == STOPPED)

    def watch(self, path, mask=IN_WATCH_MASK, autoAdd=False, callbacks=None, recursive=False):
        """
        Configure the single supported watch; must be called exactly once,
        before startReading().  Translates the inotify-style *mask* into
        ReadDirectoryChangesW filter bits.
        """
        precondition(self._state == NOT_STARTED, "watch() can only be called before startReading()", state=self._state)
        precondition(self._filter is None, "only one watch is supported")
        precondition(isinstance(autoAdd, bool), autoAdd=autoAdd)
        precondition(isinstance(recursive, bool), recursive=recursive)
        #precondition(autoAdd == recursive, "need autoAdd and recursive to be the same", autoAdd=autoAdd, recursive=recursive)
        self._path = path
        path_u = path.path
        if not isinstance(path_u, unicode):
            path_u = path_u.decode(sys.getfilesystemencoding())
            _assert(isinstance(path_u, unicode), path_u=path_u)
        self._filter = FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE
        if mask & (IN_ACCESS | IN_CLOSE_NOWRITE | IN_OPEN):
            self._filter = self._filter | FILE_NOTIFY_CHANGE_LAST_ACCESS
        if mask & IN_ATTRIB:
            self._filter = self._filter | FILE_NOTIFY_CHANGE_ATTRIBUTES | FILE_NOTIFY_CHANGE_SECURITY
        self._recursive = TRUE if recursive else FALSE
        self._callbacks = callbacks or []
        self._hDirectory = _open_directory(path_u)

    def _thread(self):
        # Background loop: block in ReadDirectoryChangesW, then push each
        # decoded event to the reactor thread for callback delivery.
        try:
            _assert(self._filter is not None, "no watch set")
            # To call Twisted or Tahoe APIs, use reactor.callFromThread as described in
            # <http://twistedmatrix.com/documents/current/core/howto/threading.html>.
            fni = FileNotifyInformation()
            while True:
                self._state = STARTED
                action = start_action(
                    action_type=u"read-changes",
                    directory=self._path.path,
                    recursive=self._recursive,
                    filter=self._filter,
                )
                try:
                    with action:
                        fni.read_changes(self._hDirectory, self._recursive, self._filter)
                except WindowsError as e:
                    # A failed read (e.g. handle closed) starts shutdown.
                    self._state = STOPPING
                if self._check_stop():
                    return
                for info in fni:
                    path = self._path.preauthChild(info.filename)  # FilePath with Unicode path
                    if info.action == FILE_ACTION_MODIFIED and path.isdir():
                        # Ignore "modified" events on directories themselves.
                        Message.log(
                            message_type=u"filtering-out",
                            info=repr(info),
                        )
                        continue
                    else:
                        Message.log(
                            message_type=u"processing",
                            info=repr(info),
                        )
                    #mask = _action_to_inotify_mask.get(info.action, IN_CHANGED)
                    @log_call(
                        action_type=MAYBE_NOTIFY.action_type,
                        include_args=[],
                        include_result=False,
                    )
                    def _do_pending_calls():
                        # Deliver every pending path to every callback with a
                        # generic IN_CHANGED mask, then reset the pending set.
                        event_mask = IN_CHANGED
                        self._pending_call = None
                        for path1 in self._pending:
                            if self._callbacks:
                                for cb in self._callbacks:
                                    try:
                                        with CALLBACK(inotify_events=event_mask):
                                            cb(None, path1, event_mask)
                                    except Exception as e2:
                                        log.err(e2)
                        self._pending = set()
                    def _maybe_notify(path2):
                        # Runs on the reactor thread: queue the path and
                        # (unless stopping) flush immediately.
                        if path2 not in self._pending:
                            self._pending.add(path2)
                        if self._state not in [STOPPING, STOPPED]:
                            _do_pending_calls()
                    # if self._pending_call is None and self._state not in [STOPPING, STOPPED]:
                    #     self._pending_call = reactor.callLater(self._pending_delay, _do_pending_calls)
                    reactor.callFromThread(_maybe_notify, path)
                    if self._check_stop():
                        return
        except Exception as e:
            log.err(e)
            self._state = STOPPED
            raise

    def _check_stop(self):
        # On a stop request: close the directory handle, drop callbacks,
        # cancel any pending delivery.  Returns True once fully stopped.
        if self._state == STOPPING:
            hDirectory = self._hDirectory
            self._callbacks = None
            self._hDirectory = None
            CloseHandle(hDirectory)
            self._state = STOPPED
            if self._pending_call:
                self._pending_call.cancel()
                self._pending_call = None
        return self._state == STOPPED