Merge pull request #475 from meejah/2909.backup-behavior.0

#2909 fix .backup file behavior and (some of) the incorrect .conflict cases (#2911)
This commit is contained in:
meejah 2018-05-01 15:52:10 -06:00 committed by GitHub
commit c9e00a988a
7 changed files with 400 additions and 115 deletions

View File

@ -23,6 +23,64 @@ def test_alice_writes_bob_receives(magic_folder):
return
def test_alice_writes_bob_receives_multiple(magic_folder):
"""
When Alice does a series of updates, Bob should just receive them
with no .backup or .conflict files being produced.
"""
alice_dir, bob_dir = magic_folder
unwanted_files = [
join(bob_dir, "multiple.backup"),
join(bob_dir, "multiple.conflict")
]
# first update
with open(join(alice_dir, "multiple"), "w") as f:
f.write("alice wrote this")
util.await_file_contents(
join(bob_dir, "multiple"), "alice wrote this",
error_if=unwanted_files,
)
# second update
with open(join(alice_dir, "multiple"), "w") as f:
f.write("someone changed their mind")
util.await_file_contents(
join(bob_dir, "multiple"), "someone changed their mind",
error_if=unwanted_files,
)
# third update
with open(join(alice_dir, "multiple"), "w") as f:
f.write("absolutely final version ship it")
util.await_file_contents(
join(bob_dir, "multiple"), "absolutely final version ship it",
error_if=unwanted_files,
)
# fourth update, but both "at once" so one should conflict
time.sleep(2)
with open(join(alice_dir, "multiple"), "w") as f:
f.write("okay one more attempt")
with open(join(bob_dir, "multiple"), "w") as f:
f.write("...but just let me add")
bob_conflict = join(bob_dir, "multiple.conflict")
alice_conflict = join(alice_dir, "multiple.conflict")
found = util.await_files_exist([
bob_conflict,
alice_conflict,
])
assert len(found) > 0, "Should have found a conflict"
print("conflict found (as expected)")
def test_alice_writes_bob_receives_old_timestamp(magic_folder):
alice_dir, bob_dir = magic_folder
fname = join(alice_dir, "ts_file")
@ -116,11 +174,13 @@ def test_bob_creates_alice_deletes_bob_restores(magic_folder):
"bob wrote this"
)
# alice deletes it (so bob should as well
# alice deletes it (so bob should as well .. but keep a backup)
unlink(join(alice_dir, "boom"))
util.await_file_vanishes(join(bob_dir, "boom"))
assert exists(join(bob_dir, "boom.backup"))
# bob restores it, with new contents
unlink(join(bob_dir, "boom.backup"))
with open(join(bob_dir, "boom"), "w") as f:
f.write("bob wrote this again, because reasons")
@ -147,7 +207,7 @@ def test_bob_creates_alice_deletes_alice_restores(magic_folder):
"bob wrote this"
)
# alice deletes it (so bob should as well
# alice deletes it (so bob should as well)
unlink(join(alice_dir, "boom2"))
util.await_file_vanishes(join(bob_dir, "boom2"))
@ -155,32 +215,25 @@ def test_bob_creates_alice_deletes_alice_restores(magic_folder):
with open(join(alice_dir, "boom2"), "w") as f:
f.write("alice re-wrote this again, because reasons")
util.await_file_contents(
join(bob_dir, "boom2"),
"alice re-wrote this again, because reasons"
)
# this sometimes fails on Travis
@pytest.mark.xfail
def test_bob_conflicts_with_alice_fresh(magic_folder):
# both alice and bob make a file at "the same time".
alice_dir, bob_dir = magic_folder
# really, we fudge this a little: in reality, either alice or bob
# "wins" by uploading to the DMD first. So we make sure bob wins
# this one by giving him a massive head start
with open(join(bob_dir, 'alpha'), 'w') as f:
f.write("this is bob's alpha\n")
time.sleep(1.0)
with open(join(alice_dir, 'alpha'), 'w') as f:
f.write("this is alice's alpha\n")
# either alice or bob will "win" by uploading to the DMD first.
with open(join(bob_dir, 'alpha'), 'w') as f0, open(join(alice_dir, 'alpha'), 'w') as f1:
f0.write("this is bob's alpha\n")
f1.write("this is alice's alpha\n")
# since bob uploaded first, alice should see a backup
util.await_file_contents(join(alice_dir, 'alpha'), "this is bob's alpha\n")
util.await_file_contents(join(alice_dir, 'alpha.backup'), "this is alice's alpha\n")
util.await_file_contents(join(bob_dir, 'alpha'), "this is alice's alpha\n")
util.await_file_contents(join(bob_dir, 'alpha.backup'), "this is bob's alpha\n")
# there should be conflicts
_bob_conflicts_alice_await_conflicts('alpha', alice_dir, bob_dir)
# this sometimes fails on Travis
@pytest.mark.xfail
def test_bob_conflicts_with_alice_preexisting(magic_folder):
# both alice and bob edit a file at "the same time" (similar to
# above, but the file already exists before the edits)
@ -193,19 +246,39 @@ def test_bob_conflicts_with_alice_preexisting(magic_folder):
# both alice and bob now have a "beta" file, at version 0
# really, we fudge this a little: in reality, either alice or bob
# "wins" by uploading to the DMD first. So we make sure bob wins
# this one by giving him a massive head start
# either alice or bob will "win" by uploading to the DMD first
# (however, they should both detect a conflict)
with open(join(bob_dir, 'beta'), 'w') as f:
f.write("this is bob's beta\n")
time.sleep(1.0)
with open(join(alice_dir, 'beta'), 'w') as f:
f.write("this is alice's beta\n")
# since bob uploaded first, alice should see a backup
util.await_file_contents(join(bob_dir, 'beta'), "this is bob's beta\n")
util.await_file_contents(join(alice_dir, 'beta'), "this is bob's beta\n")
util.await_file_contents(join(alice_dir, 'beta.backup'), "this is alice's beta\n")
# both alice and bob should see a conflict
_bob_conflicts_alice_await_conflicts("beta", alice_dir, bob_dir)
def _bob_conflicts_alice_await_conflicts(name, alice_dir, bob_dir):
"""
shared code between _fresh and _preexisting conflict test
"""
found = util.await_files_exist(
[
join(bob_dir, '{}.conflict'.format(name)),
join(alice_dir, '{}.conflict'.format(name)),
],
await_all=True,
)
assert len(found) >= 1, "should be at least one conflict"
assert open(join(bob_dir, name), 'r').read() == "this is bob's {}\n".format(name)
assert open(join(alice_dir, name), 'r').read() == "this is alice's {}\n".format(name)
alice_conflict = join(alice_dir, '{}.conflict'.format(name))
bob_conflict = join(bob_dir, '{}.conflict'.format(name))
if exists(bob_conflict):
assert open(bob_conflict, 'r').read() == "this is alice's {}\n".format(name)
if exists(alice_conflict):
assert open(alice_conflict, 'r').read() == "this is bob's {}\n".format(name)
@pytest.inlineCallbacks

View File

@ -199,10 +199,69 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
return d
def await_file_contents(path, contents, timeout=15):
class UnwantedFilesException(Exception):
"""
While waiting for some files to appear, some undesired files
appeared instead (or in addition).
"""
def __init__(self, waiting, unwanted):
super(UnwantedFilesException, self).__init__(
u"While waiting for '{}', unwanted files appeared: {}".format(
waiting,
u', '.join(unwanted),
)
)
class ExpectedFileMismatchException(Exception):
"""
A file or files we wanted didn't have the expected contents within the timeout.
"""
def __init__(self, path, timeout):
super(ExpectedFileMismatchException, self).__init__(
u"Contents of '{}' mismatched after {}s".format(path, timeout),
)
class ExpectedFileUnfoundException(Exception):
"""
A file or files we expected to find didn't appear within the
timeout.
"""
def __init__(self, path, timeout):
super(ExpectedFileUnfoundException, self).__init__(
u"Didn't find '{}' after {}s".format(path, timeout),
)
class FileShouldVanishException(Exception):
"""
A file or files we expected to disappear did not do so within the
timeout.
"""
def __init__(self, path, timeout):
super(FileShouldVanishException, self).__init__(
u"'{}' still exists after {}s".format(path, timeout),
)
def await_file_contents(path, contents, timeout=15, error_if=None):
"""
wait up to `timeout` seconds for the file at `path` (any path-like
object) to have the exact content `contents`.
:param error_if: if specified, a list of additional paths; if any
of these paths appear, an UnwantedFilesException is raised.
"""
start_time = time.time()
while time.time() - start_time < timeout:
print(" waiting for '{}'".format(path))
if error_if and any([exists(p) for p in error_if]):
raise UnwantedFilesException(
waiting=path,
unwanted=[p for p in error_if if exists(p)],
)
if exists(path):
try:
with open(path, 'r') as f:
@ -217,8 +276,33 @@ def await_file_contents(path, contents, timeout=15):
print(" got: {}".format(current.replace('\n', ' ')))
time.sleep(1)
if exists(path):
raise Exception("Contents of '{}' mismatched after {}s".format(path, timeout))
raise Exception("Didn't find '{}' after {}s".format(path, timeout))
raise ExpectedFileMismatchException(path, timeout)
raise ExpectedFileUnfoundException(path, timeout)
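As used by the new integration test above (a usage sketch: `bob_dir` comes from the magic_folder fixture and `join` is os.path.join):

util.await_file_contents(
    join(bob_dir, "multiple"), "alice wrote this",
    error_if=[
        join(bob_dir, "multiple.backup"),
        join(bob_dir, "multiple.conflict"),
    ],
)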
def await_files_exist(paths, timeout=15, await_all=False):
"""
wait up to `timeout` seconds for any of the paths to exist (or for
all of them, if `await_all` is True); once that condition is met, a
list of all the found filenames is returned. Otherwise, an Exception
is raised
"""
start_time = time.time()
while time.time() - start_time < timeout:
print(" waiting for: {}".format(' '.join(paths)))
found = [p for p in paths if exists(p)]
print("found: {}".format(found))
if await_all:
if len(found) == len(paths):
return found
else:
if len(found) > 0:
return found
time.sleep(1)
if await_all:
nice_paths = ' and '.join(paths)
else:
nice_paths = ' or '.join(paths)
raise ExpectedFileUnfoundException(nice_paths, timeout)
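A typical call, mirroring the conflict helper in the integration tests, waits for both sides' .conflict markers (paths here are illustrative):

found = util.await_files_exist(
    [
        join(bob_dir, "alpha.conflict"),
        join(alice_dir, "alpha.conflict"),
    ],
    await_all=True,  # keep polling until every listed path exists (or timeout)
)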
def await_file_vanishes(path, timeout=10):
@ -228,4 +312,4 @@ def await_file_vanishes(path, timeout=10):
if not exists(path):
return
time.sleep(1)
raise Exception("'{}' still exists after {}s".format(path, timeout))
raise FileShouldVanishException(path, timeout)

View File

@ -456,7 +456,6 @@ class QueueMixin(HookMixin):
seconds.
"""
while not self._stopped:
self._log("doing iteration")
d = task.deferLater(self._clock, self._scan_delay(), lambda: None)
# adds items to our deque
@ -469,10 +468,8 @@ class QueueMixin(HookMixin):
# *before* we trigger the 'iteration' hook, so that hook
# can successfully advance the Clock and bypass the delay
# if required (e.g. in the tests).
self._log("one loop; call_hook iteration %r" % self)
self._call_hook(None, 'iteration')
if not self._stopped:
self._log("waiting... %r" % d)
yield d
self._log("stopped")
@ -485,7 +482,6 @@ class QueueMixin(HookMixin):
@defer.inlineCallbacks
def _process_deque(self):
self._log("_process_deque %r" % (self._deque,))
# process everything currently in the queue. we're turning it
# into a list so that if any new items get added while we're
# processing, they'll not run until next time)
@ -500,7 +496,8 @@ class QueueMixin(HookMixin):
# completed)
self._in_progress.extend(to_process)
self._log("%d items to process" % len(to_process), )
if to_process:
self._log("%d items to process" % len(to_process), )
for item in to_process:
self._process_history.appendleft(item)
self._in_progress.remove(item)
@ -825,8 +822,31 @@ class Uploader(QueueMixin):
'last_downloaded_timestamp': last_downloaded_timestamp,
'user_mtime': pathinfo.ctime_ns / 1000000000.0, # why are we using ns in PathInfo??
}
# from the Fire Dragons part of the spec:
# Later, in response to a local filesystem change at a given path, the
# Magic Folder client reads the last-downloaded record associated with
# that path (if any) from the database and then uploads the current
# file. When it links the uploaded file into its client DMD, it
# includes the ``last_downloaded_uri`` field in the metadata of the
# directory entry, overwriting any existing field of that name. If
# there was no last-downloaded record associated with the path, this
# field is omitted.
# Note that the ``last_downloaded_uri`` field does *not* record the URI of
# the uploaded file (which would be redundant); it records the URI of
# the last download before the local change that caused the upload.
# The field will be absent if the file has never been downloaded by
# this client (i.e. if it was created on this client and no change
# by any other client has been detected).
# XXX currently not actually true: it will record the
# LAST THING we wrote to (or saw on) disk (not
# necessarily downloaded?)
if db_entry.last_downloaded_uri is not None:
metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
if db_entry.last_uploaded_uri is not None:
metadata['last_uploaded_uri'] = db_entry.last_uploaded_uri
empty_uploadable = Data("", self._client.convergence)
d2 = self._upload_dirnode.add_file(
@ -842,9 +862,14 @@ class Uploader(QueueMixin):
# last_downloaded_uri to the filecap so that we don't
# immediately re-download it when we start up next
last_downloaded_uri = metadata.get('last_downloaded_uri', filecap)
self._db.did_upload_version(relpath_u, new_version, filecap,
last_downloaded_uri, last_downloaded_timestamp,
pathinfo)
self._db.did_upload_version(
relpath_u,
new_version,
filecap,
last_downloaded_uri,
last_downloaded_timestamp,
pathinfo,
)
self._count('files_uploaded')
d2.addCallback(_add_db_entry)
d2.addCallback(lambda ign: True)
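As a hedged illustration of what the comment block above implies, the directory-entry metadata for a deletion of a file this client had previously both downloaded and uploaded would end up looking roughly like this (all values hypothetical):

metadata = {
    'version': 3,
    'deleted': True,
    'last_downloaded_timestamp': 1525126400,
    'user_mtime': 1525126399.5,
    # URI of the last *download* before the local delete, not of the
    # (empty) entry being uploaded now
    'last_downloaded_uri': 'URI:CHK:xxxx:yyyy:1:1:42',
    # present only because this client has uploaded the file before
    'last_uploaded_uri': 'URI:CHK:zzzz:wwww:1:1:42',
}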
@ -901,8 +926,11 @@ class Uploader(QueueMixin):
'last_downloaded_timestamp': last_downloaded_timestamp,
'user_mtime': pathinfo.mtime_ns / 1000000000.0, # why are we using ns in PathInfo??
}
if db_entry is not None and db_entry.last_downloaded_uri is not None:
metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
if db_entry is not None:
if db_entry.last_downloaded_uri is not None:
metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
if db_entry.last_uploaded_uri is not None:
metadata['last_uploaded_uri'] = db_entry.last_uploaded_uri
uploadable = FileName(unicode_from_filepath(fp), self._client.convergence)
d2 = self._upload_dirnode.add_file(
@ -917,10 +945,15 @@ class Uploader(QueueMixin):
# if we're uploading a file, we want to set
# last_downloaded_uri to the filecap so that we don't
# immediately re-download it when we start up next
last_downloaded_uri = metadata.get('last_downloaded_uri', filecap)
self._db.did_upload_version(relpath_u, new_version, filecap,
last_downloaded_uri, last_downloaded_timestamp,
pathinfo)
last_downloaded_uri = filecap
self._db.did_upload_version(
relpath_u,
new_version,
filecap,
last_downloaded_uri,
last_downloaded_timestamp,
pathinfo
)
self._count('files_uploaded')
return True
d2.addCallback(_add_db_entry)
@ -992,7 +1025,6 @@ class WriteFileMixin(object):
precondition_abspath(abspath_u)
replacement_path_u = abspath_u + u".tmp" # FIXME more unique
backup_path_u = abspath_u + u".backup"
if now is None:
now = time.time()
@ -1013,9 +1045,10 @@ class WriteFileMixin(object):
return self._rename_conflicted_file(abspath_u, replacement_path_u)
else:
try:
fileutil.replace_file(abspath_u, replacement_path_u, backup_path_u)
fileutil.replace_file(abspath_u, replacement_path_u)
return abspath_u
except fileutil.ConflictError:
except fileutil.ConflictError as e:
self._log("overwrite becomes _conflict: {}".format(e))
return self._rename_conflicted_file(abspath_u, replacement_path_u)
def _rename_conflicted_file(self, abspath_u, replacement_path_u):
@ -1122,15 +1155,11 @@ class Downloader(QueueMixin, WriteFileMixin):
We check the remote metadata version against our magic-folder db version number;
latest version wins.
"""
self._log("_should_download(%r, %r, %r)" % (relpath_u, remote_version, remote_uri))
if magicpath.should_ignore_file(relpath_u):
self._log("nope")
return False
self._log("yep")
db_entry = self._db.get_db_entry(relpath_u)
if db_entry is None:
return True
self._log("version %r" % (db_entry.version,))
if db_entry.version < remote_version:
return True
if db_entry.last_downloaded_uri is None and _is_empty_filecap(self._client, remote_uri):
@ -1292,12 +1321,12 @@ class Downloader(QueueMixin, WriteFileMixin):
fp = self._get_filepath(item.relpath_u)
abspath_u = unicode_from_filepath(fp)
conflict_path_u = self._get_conflicted_filename(abspath_u)
last_uploaded_uri = item.metadata.get('last_uploaded_uri', None)
d = defer.succeed(False)
def do_update_db(written_abspath_u):
filecap = item.file_node.get_uri()
last_uploaded_uri = item.metadata.get('last_uploaded_uri', None)
if not item.file_node.get_size():
filecap = None # ^ is an empty file
last_downloaded_uri = filecap
@ -1308,8 +1337,12 @@ class Downloader(QueueMixin, WriteFileMixin):
raise Exception("downloaded object %s disappeared" % quote_local_unicode_path(written_abspath_u))
self._db.did_upload_version(
item.relpath_u, item.metadata['version'], last_uploaded_uri,
last_downloaded_uri, last_downloaded_timestamp, written_pathinfo,
item.relpath_u,
item.metadata['version'],
last_uploaded_uri,
last_downloaded_uri,
last_downloaded_timestamp,
written_pathinfo,
)
self._count('objects_downloaded')
item.set_status('success', self._clock.seconds())
@ -1326,22 +1359,65 @@ class Downloader(QueueMixin, WriteFileMixin):
raise ConflictError("download failed: already conflicted: %r" % (item.relpath_u,))
d.addCallback(fail)
else:
# Let ``last_downloaded_uri`` be the field of that name obtained from
# the directory entry metadata for ``foo`` in Bob's DMD (this field
# may be absent). Then the algorithm is:
# * 2a. Attempt to "stat" ``foo`` to get its *current statinfo* (size
# in bytes, ``mtime``, and ``ctime``). If Alice has no local copy
# of ``foo``, classify as an overwrite.
current_statinfo = get_pathinfo(abspath_u)
is_conflict = False
db_entry = self._db.get_db_entry(item.relpath_u)
dmd_last_downloaded_uri = item.metadata.get('last_downloaded_uri', None)
dmd_last_uploaded_uri = item.metadata.get('last_uploaded_uri', None)
# * 2b. Read the following information for the path ``foo`` from the
# local magic folder db:
# * the *last-seen statinfo*, if any (this is the size in
# bytes, ``mtime``, and ``ctime`` stored in the ``local_files``
# table when the file was last uploaded);
# * the ``last_uploaded_uri`` field of the ``local_files`` table
# for this file, which is the URI under which the file was last
# uploaded.
if db_entry:
if dmd_last_downloaded_uri is not None and db_entry.last_downloaded_uri is not None:
if dmd_last_downloaded_uri != db_entry.last_downloaded_uri:
if not _is_empty_filecap(self._client, dmd_last_downloaded_uri):
# * 2c. If any of the following are true, then classify as a conflict:
# * i. there are pending notifications of changes to ``foo``;
# * ii. the last-seen statinfo is either absent (i.e. there is
# no entry in the database for this path), or different from the
# current statinfo;
if current_statinfo.exists:
self._log("checking conflicts {}".format(item.relpath_u))
if (db_entry.mtime_ns != current_statinfo.mtime_ns or
db_entry.ctime_ns != current_statinfo.ctime_ns or
db_entry.size != current_statinfo.size):
is_conflict = True
self._log("conflict because local change0")
if db_entry.last_downloaded_uri is None \
or db_entry.last_uploaded_uri is None \
or dmd_last_downloaded_uri is None:
# we've never downloaded anything before for this
# file, but the other side might have created a new
# file "at the same time"
if db_entry.version >= item.metadata['version']:
self._log("conflict because my version >= remote version")
is_conflict = True
self._count('objects_conflicted')
elif dmd_last_uploaded_uri is not None and dmd_last_uploaded_uri != db_entry.last_uploaded_uri:
elif dmd_last_downloaded_uri != db_entry.last_downloaded_uri:
is_conflict = True
self._log("conflict because dmd_last_downloaded_uri != db_entry.last_downloaded_uri")
else: # no local db_entry .. but has the file appeared locally in the meantime?
if current_statinfo.exists:
is_conflict = True
self._count('objects_conflicted')
elif self._is_upload_pending(item.relpath_u):
is_conflict = True
self._count('objects_conflicted')
self._log("conflict because local change1")
if is_conflict:
self._count('objects_conflicted')
if item.relpath_u.endswith(u"/"):
if item.metadata.get('deleted', False):

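Pulling the branches above together, the overwrite-vs-conflict decision can be summarized with this standalone sketch (a simplification of the downloader code above, not a drop-in replacement; `remote_version` stands in for item.metadata['version']):

def classify_download(db_entry, current_statinfo,
                      dmd_last_downloaded_uri, remote_version):
    """Return True if the downloaded item should be treated as a conflict."""
    if db_entry is None:
        # never seen this path before: conflict only if a file has
        # appeared locally in the meantime
        return current_statinfo.exists
    if current_statinfo.exists and (
            db_entry.mtime_ns != current_statinfo.mtime_ns or
            db_entry.ctime_ns != current_statinfo.ctime_ns or
            db_entry.size != current_statinfo.size):
        return True  # the local file changed since our last recorded statinfo
    if (db_entry.last_downloaded_uri is None or
            db_entry.last_uploaded_uri is None or
            dmd_last_downloaded_uri is None):
        # we have never exchanged this file before, but the other side
        # might have created it "at the same time"
        return db_entry.version >= remote_version
    # otherwise, conflict iff the remote's last download differs from ours
    return dmd_last_downloaded_uri != db_entry.last_downloaded_uri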
View File

@ -854,6 +854,69 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
)
yield self._check_version_in_dmd(self.bob_magicfolder, u"blam", 0)
@defer.inlineCallbacks
def test_conflict_local_change_fresh(self):
alice_fname = os.path.join(self.alice_magic_dir, 'localchange0')
bob_fname = os.path.join(self.bob_magic_dir, 'localchange0')
# alice creates a file, bob downloads it
alice_proc = self.alice_magicfolder.uploader.set_hook('processed')
bob_proc = self.bob_magicfolder.downloader.set_hook('processed')
yield self.alice_fileops.write(alice_fname, 'contents0\n')
yield iterate(self.alice_magicfolder) # for windows
# before bob downloads, we make a local file for bob by the
# same name
with open(bob_fname, 'w') as f:
f.write("not the right stuff")
yield iterate_uploader(self.alice_magicfolder)
yield alice_proc # alice uploads
yield iterate_downloader(self.bob_magicfolder)
yield bob_proc # bob downloads
# ...so now bob should produce a conflict
self.assertTrue(os.path.exists(bob_fname + '.conflict'))
@defer.inlineCallbacks
def test_conflict_local_change_existing(self):
alice_fname = os.path.join(self.alice_magic_dir, 'localchange1')
bob_fname = os.path.join(self.bob_magic_dir, 'localchange1')
# alice creates a file, bob downloads it
alice_proc = self.alice_magicfolder.uploader.set_hook('processed')
bob_proc = self.bob_magicfolder.downloader.set_hook('processed')
yield self.alice_fileops.write(alice_fname, 'contents0\n')
yield iterate(self.alice_magicfolder) # for windows
yield iterate_uploader(self.alice_magicfolder)
yield alice_proc # alice uploads
yield iterate_downloader(self.bob_magicfolder)
yield bob_proc # bob downloads
# alice creates a new change
alice_proc = self.alice_magicfolder.uploader.set_hook('processed')
bob_proc = self.bob_magicfolder.downloader.set_hook('processed')
yield self.alice_fileops.write(alice_fname, 'contents1\n')
yield iterate(self.alice_magicfolder) # for windows
# before bob downloads, make a local change
with open(bob_fname, "w") as f:
f.write("bob's local change")
yield iterate_uploader(self.alice_magicfolder)
yield alice_proc # alice uploads
yield iterate_downloader(self.bob_magicfolder)
yield bob_proc # bob downloads
# ...so now bob should produce a conflict
self.assertTrue(os.path.exists(bob_fname + '.conflict'))
@defer.inlineCallbacks
def test_alice_delete_and_restore(self):
@ -1113,7 +1176,7 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
d.addCallback(lambda ign: self._check_version_in_local_db(self.alice_magicfolder, u"file1", 3))
d.addCallback(lambda ign: self._check_downloader_count('objects_failed', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
def Alice_conflicts_with_Bobs_last_downloaded_uri():
if _debug: print "Alice conflicts with Bob\n"
@ -1131,11 +1194,11 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
return d2
d.addCallback(lambda ign: Alice_conflicts_with_Bobs_last_downloaded_uri())
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 4))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 4, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_failed', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 1, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_succeeded', 1, magic=self.bob_magicfolder))
@ -1151,7 +1214,7 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
d.addCallback(lambda ign: self._check_version_in_dmd(self.alice_magicfolder, u"file2", 0))
d.addCallback(lambda ign: self._check_version_in_local_db(self.alice_magicfolder, u"file2", 0))
d.addCallback(lambda ign: self._check_downloader_count('objects_failed', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 1, magic=self.bob_magicfolder))
def advance(ign):
@ -1177,17 +1240,16 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
yield iterate(self.bob_magicfolder)
d.addCallback(lambda ign: _wait_for(None, Bob_to_rewrite_file2, alice=False))
d.addCallback(lambda ign: self._check_version_in_dmd(self.bob_magicfolder, u"file2", 1))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 5))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 5, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_failed', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_succeeded', 2, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 2, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_queued', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('directories_created', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_failed', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 2, magic=self.alice_magicfolder))
# d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 1, magic=self.bob_magicfolder))
# XXX here we advance the clock and then test again to make sure no values are monotonically increasing
# with each queue turn ;-p
@ -1195,16 +1257,16 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
bob_clock.advance(6)
d.addCallback(lambda ign: self._check_version_in_dmd(self.bob_magicfolder, u"file2", 1))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 5))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1))
d.addCallback(lambda ign: self._check_uploader_count('objects_failed', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_succeeded', 2, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 2, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_queued', 0, magic=self.bob_magicfolder))
## d.addCallback(lambda ign: self._check_uploader_count('objects_queued', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('directories_created', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_failed', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 2, magic=self.alice_magicfolder))
## d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 1, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 2, magic=self.bob_magicfolder))
def Alice_conflicts_with_Bobs_last_uploaded_uri():
if _debug: print "Alice conflicts with Bob\n"
@ -1228,9 +1290,9 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
d.addCallback(lambda ign: self._check_uploader_count('objects_failed', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_succeeded', 2, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 2, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_queued', 0, magic=self.bob_magicfolder))
## d.addCallback(lambda ign: self._check_uploader_count('objects_queued', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('directories_created', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_failed', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 2, magic=self.alice_magicfolder))
@ -1242,7 +1304,7 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
d.addCallback(foo)
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 2, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 6))
@ -1259,7 +1321,7 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 7))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 2, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
@defer.inlineCallbacks
def Bob_to_rewrite_file3():
@ -1278,7 +1340,7 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
d.addCallback(lambda ign: self._check_uploader_count('files_uploaded', 3, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('objects_queued', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_uploader_count('directories_created', 0, magic=self.bob_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 1, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_conflicted', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_failed', 0, magic=self.alice_magicfolder))
d.addCallback(lambda ign: self._check_downloader_count('objects_downloaded', 3, magic=self.alice_magicfolder))
@ -1750,10 +1812,9 @@ class MockTest(SingleMagicFolderTestMixin, unittest.TestCase):
conflicted_path = local_file + u".conflict"
self.failIf(os.path.exists(conflicted_path))
# At this point, the backup file should exist with content "foo"
# no backup
backup_path = local_file + u".backup"
self.failUnless(os.path.exists(backup_path))
self.failUnlessEqual(fileutil.read(backup_path), "foo")
self.failIf(os.path.exists(backup_path))
# .tmp file shouldn't exist
self.failIf(os.path.exists(local_file + u".tmp"))

View File

@ -510,40 +510,27 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
workdir = fileutil.abspath_expanduser_unicode(u"test_replace_file")
fileutil.make_dirs(workdir)
backup_path = os.path.join(workdir, "backup")
replaced_path = os.path.join(workdir, "replaced")
replacement_path = os.path.join(workdir, "replacement")
# when none of the files exist
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path, backup_path)
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path)
# when only replaced exists
fileutil.write(replaced_path, "foo")
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path, backup_path)
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "foo")
# when both replaced and replacement exist, but not backup
# when both replaced and replacement exist
fileutil.write(replacement_path, "bar")
fileutil.replace_file(replaced_path, replacement_path, backup_path)
self.failUnlessEqual(fileutil.read(backup_path), "foo")
fileutil.replace_file(replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "bar")
self.failIf(os.path.exists(replacement_path))
# when only replacement exists
os.remove(backup_path)
os.remove(replaced_path)
fileutil.write(replacement_path, "bar")
fileutil.replace_file(replaced_path, replacement_path, backup_path)
self.failUnlessEqual(fileutil.read(replaced_path), "bar")
self.failIf(os.path.exists(replacement_path))
self.failIf(os.path.exists(backup_path))
# when replaced, replacement and backup all exist
fileutil.write(replaced_path, "foo")
fileutil.write(replacement_path, "bar")
fileutil.write(backup_path, "bak")
fileutil.replace_file(replaced_path, replacement_path, backup_path)
self.failUnlessEqual(fileutil.read(backup_path), "foo")
fileutil.replace_file(replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "bar")
self.failIf(os.path.exists(replacement_path))

View File

@ -82,6 +82,9 @@ class INotify(object):
def stopReading(self):
pass
def loseConnection(self):
pass
def watch(self, filepath, mask=IN_WATCH_MASK, autoAdd=False, callbacks=None, recursive=False):
self.callbacks = callbacks

View File

@ -243,7 +243,9 @@ def du(basedir):
def move_into_place(source, dest):
"""Atomically replace a file, or as near to it as the platform allows.
The dest file may or may not exist."""
if "win32" in sys.platform.lower():
if "win32" in sys.platform.lower() and os.path.exists(source):
# we check for source existing since we don't want to nuke the
# dest unless we'll succeed at moving the target into place
remove_if_possible(dest)
os.rename(source, dest)
@ -613,12 +615,13 @@ if sys.platform == "win32":
def rename_no_overwrite(source_path, dest_path):
os.rename(source_path, dest_path)
def replace_file(replaced_path, replacement_path, backup_path):
def replace_file(replaced_path, replacement_path):
precondition_abspath(replaced_path)
precondition_abspath(replacement_path)
precondition_abspath(backup_path)
r = ReplaceFileW(replaced_path, replacement_path, backup_path,
# no "backup" path (the first None) because we don't want to
# create a backup file
r = ReplaceFileW(replaced_path, replacement_path, None,
REPLACEFILE_IGNORE_MERGE_ERRORS, None, None)
if r == 0:
# The UnableToUnlinkReplacementError case does not happen on Windows;
@ -628,7 +631,7 @@ if sys.platform == "win32":
raise ConflictError("WinError: %s" % (WinError(err),))
try:
rename_no_overwrite(replacement_path, replaced_path)
move_into_place(replacement_path, replaced_path)
except EnvironmentError:
reraise(ConflictError)
else:
@ -640,24 +643,22 @@ else:
except EnvironmentError:
reraise(UnableToUnlinkReplacementError)
def replace_file(replaced_path, replacement_path, backup_path):
def replace_file(replaced_path, replacement_path):
precondition_abspath(replaced_path)
precondition_abspath(replacement_path)
precondition_abspath(backup_path)
if not os.path.exists(replacement_path):
raise ConflictError("Replacement file not found: %r" % (replacement_path,))
try:
os.rename(replaced_path, backup_path)
move_into_place(replacement_path, replaced_path)
except OSError as e:
if e.errno != ENOENT:
raise
try:
rename_no_overwrite(replacement_path, replaced_path)
except EnvironmentError:
reraise(ConflictError)
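With the backup_path parameter removed, a call site now looks like this minimal sketch (paths are hypothetical; both must be absolute, per the preconditions above):

from allmydata.util import fileutil

try:
    # atomically moves foo.tmp over foo; no .backup file is created any more
    fileutil.replace_file(u"/magic/bob/foo", u"/magic/bob/foo.tmp")
except fileutil.ConflictError:
    # replacement missing or the rename failed; the magic-folder writer
    # falls back to _rename_conflicted_file() in this situation
    pass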
PathInfo = namedtuple('PathInfo', 'isdir isfile islink exists size mtime_ns ctime_ns')
def seconds_to_ns(t):