consolidate: create multiple numbered backups of the original Archives directory, not just the first time

Brian Warner 2009-03-12 16:04:27 -07:00
parent c4c6a62954
commit 760688a224
2 changed files with 12 additions and 5 deletions

View File

@@ -89,10 +89,14 @@ class Consolidator:
         if not systems:
             self.msg("No systems under /Backups, nothing to consolidate")
             return 0
-        if not os.path.exists(self.backupfile):
-            f = open(self.backupfile, "wb")
-            pickle.dump(backup_data, f)
-            f.close()
+        backupfile = self.backupfile
+        counter = 0
+        while os.path.exists(backupfile):
+            backupfile = self.backupfile + "." + str(counter)
+            counter += 1
+        f = open(backupfile, "wb")
+        pickle.dump(backup_data, f)
+        f.close()
         for name, archives_dircap in sorted(systems.items()):
             self.do_system(name, archives_dircap)
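
The new loop picks the first unused name in the sequence backupfile, backupfile.0, backupfile.1, ..., so the snapshot written by an earlier run is never overwritten. A minimal standalone sketch of the same naming scheme (the helper name pick_backup_filename is illustrative, not part of the patch):

    import os

    def pick_backup_filename(base):
        # Return 'base' itself if nothing is there yet, otherwise the
        # first free numbered sibling: base.0, base.1, base.2, ...
        candidate = base
        counter = 0
        while os.path.exists(candidate):
            candidate = base + "." + str(counter)
            counter += 1
        return candidate

    # The first run writes <base>, the second writes <base>.0, the
    # third <base>.1, and so on; earlier snapshots are left untouched.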

View File

@@ -179,7 +179,7 @@ class Consolidate(GridTestMixin, CLITestMixin, unittest.TestCase):
                                  "7 dirs created, 2 used as-is, 13 reused")
             self.failUnless(os.path.exists(dbfile))
             self.failUnless(os.path.exists(backupfile))
-            backup = pickle.load(open(backupfile, "rb"))
+            self.first_backup = backup = pickle.load(open(backupfile, "rb"))
             self.failUnless(u"fluxx" in backup["systems"])
             self.failUnless(u"fluxx" in backup["archives"])
             adata = backup["archives"]["fluxx"]
@@ -198,6 +198,9 @@ class Consolidate(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnlessEqual(last.strip(),
                                  "system done, "
                                  "0 dirs created, 0 used as-is, 0 reused")
+            backup = pickle.load(open(backupfile, "rb"))
+            self.failUnlessEqual(backup, self.first_backup)
+            self.failUnless(os.path.exists(backupfile + ".0"))
         d.addCallback(_check_consolidate_output2)
         d.addCallback(lambda ignored: self.build_manifest(self.nodes["Archives"]))
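
The added assertions check that a second, no-op consolidation run leaves the original backup pickle identical to the snapshot saved after the first run, and drops a numbered ".0" copy next to it. A self-contained sketch of that behaviour, using made-up paths and data rather than the test fixture:

    import os, pickle, tempfile

    workdir = tempfile.mkdtemp()
    backupfile = os.path.join(workdir, "backup.pickle")

    def write_backup(data):
        # Same naming loop as the Consolidator change above.
        target = backupfile
        counter = 0
        while os.path.exists(target):
            target = backupfile + "." + str(counter)
            counter += 1
        with open(target, "wb") as f:
            pickle.dump(data, f)

    write_backup({"systems": {u"fluxx": "dircap-1"}})    # first run
    with open(backupfile, "rb") as f:
        first_backup = pickle.load(f)

    write_backup({"systems": {u"fluxx": "dircap-1"}})    # second run
    with open(backupfile, "rb") as f:
        assert pickle.load(f) == first_backup            # original untouched
    assert os.path.exists(backupfile + ".0")             # numbered copy created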