mirror of
https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2025-04-19 08:16:19 +00:00
Add magicfolderdb.py.
Signed-off-by: Daira Hopwood <daira@jacaranda.org>
This commit is contained in:
parent
615859acc1
commit
5754c01cb5
@ -19,7 +19,7 @@ from allmydata.util.encodingutil import listdir_filepath, to_filepath, \
|
||||
extend_filepath, unicode_from_filepath, unicode_segments_from, \
|
||||
quote_filepath, quote_local_unicode_path, quote_output, FilenameEncodingError
|
||||
from allmydata.immutable.upload import FileName, Data
|
||||
from allmydata import backupdb, magicpath
|
||||
from allmydata import magicfolderdb, magicpath
|
||||
|
||||
|
||||
IN_EXCL_UNLINK = 0x04000000L
|
||||
@ -31,13 +31,13 @@ def get_inotify_module():
|
||||
elif runtime.platform.supportsINotify():
|
||||
from twisted.internet import inotify
|
||||
else:
|
||||
raise NotImplementedError("filesystem notification needed for drop-upload is not supported.\n"
|
||||
raise NotImplementedError("filesystem notification needed for Magic Folder is not supported.\n"
|
||||
"This currently requires Linux or Windows.")
|
||||
return inotify
|
||||
except (ImportError, AttributeError) as e:
|
||||
log.msg(e)
|
||||
if sys.platform == "win32":
|
||||
raise NotImplementedError("filesystem notification needed for drop-upload is not supported.\n"
|
||||
raise NotImplementedError("filesystem notification needed for Magic Folder is not supported.\n"
|
||||
"Windows support requires at least Vista, and has only been tested on Windows 7.")
|
||||
raise
|
||||
|
||||
@ -51,7 +51,7 @@ class MagicFolder(service.MultiService):
|
||||
|
||||
service.MultiService.__init__(self)
|
||||
|
||||
db = backupdb.get_backupdb(dbfile, create_version=(backupdb.MAGIC_FOLDER_SCHEMA_v3, 3))
|
||||
db = magicfolderdb.get_magicfolderdb(dbfile, create_version=(magicfolderdb.SCHEMA_v1, 1))
|
||||
if db is None:
|
||||
return Failure(Exception('ERROR: Unable to load magic folder db.'))
|
||||
|
||||
|
139
src/allmydata/magicfolderdb.py
Normal file
139
src/allmydata/magicfolderdb.py
Normal file
@ -0,0 +1,139 @@
|
||||
|
||||
import sys
|
||||
|
||||
from allmydata.util.dbutil import get_db, DBError
|
||||
|
||||
|
||||
# magic-folder db schema version 1
# Passed to dbutil.get_db() as the create_version DDL; the 'version' table
# holds the single schema-version row, and 'local_files' holds one row per
# tracked file (keyed by its relpath within the magic folder).
SCHEMA_v1 = """
CREATE TABLE version
(
 version INTEGER  -- contains one row, set to 1
);

CREATE TABLE local_files
(
 path  VARCHAR(1024) PRIMARY KEY, -- UTF-8 filename relative to local magic folder dir
 -- note that size is before mtime and ctime here, but after in function parameters
 size  INTEGER,       -- ST_SIZE, or NULL if the file has been deleted
 mtime REAL,          -- ST_MTIME
 ctime REAL,          -- ST_CTIME
 version INTEGER,
 last_uploaded_uri VARCHAR(256) UNIQUE,       -- URI:CHK:...
 last_downloaded_uri VARCHAR(256) UNIQUE,     -- URI:CHK:...
 last_downloaded_timestamp REAL
);
"""
|
||||
|
||||
|
||||
def get_magicfolderdb(dbfile, stderr=sys.stderr,
|
||||
create_version=(SCHEMA_v1, 1), just_create=False):
|
||||
# Open or create the given backupdb file. The parent directory must
|
||||
# exist.
|
||||
try:
|
||||
(sqlite3, db) = get_db(dbfile, stderr, create_version,
|
||||
just_create=just_create, dbname="magicfolderdb")
|
||||
if create_version[1] in (1, 2):
|
||||
return MagicFolderDB(sqlite3, db)
|
||||
else:
|
||||
print >>stderr, "invalid magicfolderdb schema version specified"
|
||||
return None
|
||||
except DBError, e:
|
||||
print >>stderr, e
|
||||
return None
|
||||
|
||||
|
||||
class MagicFolderDB(object):
    """Thin wrapper around the magic-folder SQLite database.

    Holds one row per tracked file in the 'local_files' table, keyed by the
    file's relpath (UTF-8, relative to the local magic folder directory).
    All queries go through a single shared cursor; writes are committed
    immediately.
    """
    VERSION = 1

    def __init__(self, sqlite_module, connection):
        # sqlite_module is kept so we can catch its exception classes
        # (IntegrityError/OperationalError) in did_upload_version below.
        self.sqlite_module = sqlite_module
        self.connection = connection
        self.cursor = connection.cursor()

    def check_file_db_exists(self, path):
        """I will tell you if a given file has an entry in my database or not
        by returning True or False.
        """
        c = self.cursor
        c.execute("SELECT size,mtime,ctime"
                  " FROM local_files"
                  " WHERE path=?",
                  (path,))
        row = self.cursor.fetchone()
        return row is not None

    def get_all_relpaths(self):
        """
        Retrieve a set of all relpaths of files that have had an entry in magic folder db
        (i.e. that have been downloaded at least once).
        """
        self.cursor.execute("SELECT path FROM local_files")
        rows = self.cursor.fetchall()
        return set([r[0] for r in rows])

    def get_last_downloaded_uri(self, relpath_u):
        """
        Return the last downloaded uri recorded in the magic folder db.
        If none are found then return None.
        """
        c = self.cursor
        c.execute("SELECT last_downloaded_uri"
                  " FROM local_files"
                  " WHERE path=?",
                  (relpath_u,))
        row = self.cursor.fetchone()
        if not row:
            return None
        return row[0]

    def get_local_file_version(self, relpath_u):
        """
        Return the version of a local file tracked by our magic folder db.
        If no db entry is found then return None.
        """
        c = self.cursor
        c.execute("SELECT version"
                  " FROM local_files"
                  " WHERE path=?",
                  (relpath_u,))
        row = self.cursor.fetchone()
        if not row:
            return None
        return row[0]

    def did_upload_version(self, filecap, relpath_u, version, pathinfo):
        """Record that 'relpath_u' now exists at 'version' with cap 'filecap'.

        Inserts a new row, or updates the existing one if 'relpath_u' is
        already tracked.  'pathinfo' must provide .size, .mtime and .ctime.
        Commits before returning.

        BUGFIX: the INSERT previously used 6 placeholders for 7 bound values
        against an 8-column table, so sqlite3 raised ProgrammingError (which
        the except clause below does not catch) and the insert path always
        failed.  It now binds all 8 columns in schema order.
        """
        try:
            # Columns: path, size, mtime, ctime, version, last_uploaded_uri,
            # last_downloaded_uri, last_downloaded_timestamp.
            # last_uploaded_uri is left NULL to match the UPDATE branch below,
            # which does not touch it either -- TODO confirm intended contract.
            self.cursor.execute("INSERT INTO local_files VALUES (?,?,?,?,?,?,?,?)",
                                (relpath_u, pathinfo.size, pathinfo.mtime,
                                 pathinfo.ctime, version, None, filecap,
                                 pathinfo.mtime))
        except (self.sqlite_module.IntegrityError, self.sqlite_module.OperationalError):
            # Row already exists (PRIMARY KEY conflict): update it in place.
            self.cursor.execute("UPDATE local_files"
                                " SET size=?, mtime=?, ctime=?, version=?, last_downloaded_uri=?, last_downloaded_timestamp=?"
                                " WHERE path=?",
                                (pathinfo.size, pathinfo.mtime, pathinfo.ctime,
                                 version, filecap, pathinfo.mtime, relpath_u))
        self.connection.commit()

    def is_new_file(self, pathinfo, relpath_u):
        """
        Returns true if the file's current pathinfo (size, mtime, and ctime) has
        changed from the pathinfo previously stored in the db.
        """
        c = self.cursor
        c.execute("SELECT size, mtime, ctime"
                  " FROM local_files"
                  " WHERE path=?",
                  (relpath_u,))
        row = self.cursor.fetchone()
        if not row:
            # never seen before: treat as new
            return True
        return (pathinfo.size, pathinfo.mtime, pathinfo.ctime) != row
|
@ -12,29 +12,28 @@ from allmydata.util.dbutil import get_db, DBError
|
||||
DAY = 24*60*60
|
||||
MONTH = 30*DAY
|
||||
|
||||
MAIN_SCHEMA = """
|
||||
CREATE TABLE version
|
||||
SCHEMA_v1 = """
|
||||
CREATE TABLE version -- added in v1
|
||||
(
|
||||
version INTEGER -- contains one row, set to %s
|
||||
version INTEGER -- contains one row, set to 2
|
||||
);
|
||||
|
||||
CREATE TABLE local_files
|
||||
CREATE TABLE local_files -- added in v1
|
||||
(
|
||||
path VARCHAR(1024) PRIMARY KEY, -- index, this is an absolute UTF-8-encoded local filename
|
||||
-- note that size is before mtime and ctime here, but after in function parameters
|
||||
size INTEGER, -- os.stat(fn)[stat.ST_SIZE] (NULL if the file has been deleted)
|
||||
size INTEGER, -- os.stat(fn)[stat.ST_SIZE]
|
||||
mtime NUMBER, -- os.stat(fn)[stat.ST_MTIME]
|
||||
ctime NUMBER, -- os.stat(fn)[stat.ST_CTIME]
|
||||
fileid INTEGER%s
|
||||
fileid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE caps
|
||||
CREATE TABLE caps -- added in v1
|
||||
(
|
||||
fileid INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
filecap VARCHAR(256) UNIQUE -- URI:CHK:...
|
||||
);
|
||||
|
||||
CREATE TABLE last_upload
|
||||
CREATE TABLE last_upload -- added in v1
|
||||
(
|
||||
fileid INTEGER PRIMARY KEY,
|
||||
last_uploaded TIMESTAMP,
|
||||
@ -43,8 +42,6 @@ CREATE TABLE last_upload
|
||||
|
||||
"""
|
||||
|
||||
SCHEMA_v1 = MAIN_SCHEMA % (1, "")
|
||||
|
||||
TABLE_DIRECTORY = """
|
||||
|
||||
CREATE TABLE directories -- added in v2
|
||||
@ -57,7 +54,7 @@ CREATE TABLE directories -- added in v2
|
||||
|
||||
"""
|
||||
|
||||
SCHEMA_v2 = MAIN_SCHEMA % (2, "") + TABLE_DIRECTORY
|
||||
SCHEMA_v2 = SCHEMA_v1 + TABLE_DIRECTORY
|
||||
|
||||
UPDATE_v1_to_v2 = TABLE_DIRECTORY + """
|
||||
UPDATE version SET version=2;
|
||||
@ -67,7 +64,6 @@ UPDATERS = {
|
||||
2: UPDATE_v1_to_v2,
|
||||
}
|
||||
|
||||
|
||||
def get_backupdb(dbfile, stderr=sys.stderr,
|
||||
create_version=(SCHEMA_v2, 2), just_create=False):
|
||||
# Open or create the given backupdb file. The parent directory must
|
||||
@ -75,11 +71,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
|
||||
try:
|
||||
(sqlite3, db) = get_db(dbfile, stderr, create_version, updaters=UPDATERS,
|
||||
just_create=just_create, dbname="backupdb")
|
||||
if create_version[1] in (1, 2):
|
||||
return BackupDB(sqlite3, db)
|
||||
else:
|
||||
print >>stderr, "invalid db schema version specified"
|
||||
return None
|
||||
return BackupDB_v2(sqlite3, db)
|
||||
except DBError, e:
|
||||
print >>stderr, e
|
||||
return None
|
||||
@ -135,7 +127,7 @@ class DirectoryResult:
|
||||
self.bdb.did_check_directory_healthy(self.dircap, results)
|
||||
|
||||
|
||||
class BackupDB:
|
||||
class BackupDB_v2:
|
||||
VERSION = 2
|
||||
NO_CHECK_BEFORE = 1*MONTH
|
||||
ALWAYS_CHECK_AFTER = 2*MONTH
|
||||
@ -145,21 +137,6 @@ class BackupDB:
|
||||
self.connection = connection
|
||||
self.cursor = connection.cursor()
|
||||
|
||||
def check_file_db_exists(self, path):
|
||||
"""I will tell you if a given file has an entry in my database or not
|
||||
by returning True or False.
|
||||
"""
|
||||
c = self.cursor
|
||||
c.execute("SELECT size,mtime,ctime,fileid"
|
||||
" FROM local_files"
|
||||
" WHERE path=?",
|
||||
(path,))
|
||||
row = self.cursor.fetchone()
|
||||
if not row:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def check_file(self, path, use_timestamps=True):
|
||||
"""I will tell you if a given local file needs to be uploaded or not,
|
||||
by looking in a database and seeing if I have a record of this file
|
||||
@ -182,9 +159,9 @@ class BackupDB:
|
||||
is not healthy, please upload the file and call r.did_upload(filecap)
|
||||
when you're done.
|
||||
|
||||
If use_timestamps=True (the default), I will compare mtime and ctime
|
||||
If use_timestamps=True (the default), I will compare ctime and mtime
|
||||
of the local file against an entry in my database, and consider the
|
||||
file to be unchanged if mtime, ctime, and filesize are all the same
|
||||
file to be unchanged if ctime, mtime, and filesize are all the same
|
||||
as the earlier version. If use_timestamps=False, I will not trust the
|
||||
timestamps, so more files (perhaps all) will be marked as needing
|
||||
upload. A future version of this database may hash the file to make
|
||||
@ -200,8 +177,8 @@ class BackupDB:
|
||||
# XXX consider using get_pathinfo
|
||||
s = os.stat(path)
|
||||
size = s[stat.ST_SIZE]
|
||||
mtime = s[stat.ST_MTIME]
|
||||
ctime = s[stat.ST_CTIME]
|
||||
mtime = s[stat.ST_MTIME]
|
||||
|
||||
now = time.time()
|
||||
c = self.cursor
|
@ -8,7 +8,7 @@ from allmydata.scripts.common import get_alias, escape_path, DEFAULT_ALIAS, \
|
||||
UnknownAliasError
|
||||
from allmydata.scripts.common_http import do_http, HTTPError, format_http_error
|
||||
from allmydata.util import time_format
|
||||
from allmydata import backupdb
|
||||
from allmydata.scripts import backupdb
|
||||
from allmydata.util.encodingutil import listdir_unicode, quote_output, \
|
||||
quote_local_unicode_path, to_str, FilenameEncodingError, unicode_to_url
|
||||
from allmydata.util.assertutil import precondition
|
||||
|
@ -151,7 +151,9 @@ def list(options):
|
||||
line.append(uri)
|
||||
if options["readonly-uri"]:
|
||||
line.append(quote_output(ro_uri or "-", quotemarks=False))
|
||||
|
||||
rows.append((encoding_error, line))
|
||||
|
||||
max_widths = []
|
||||
left_justifys = []
|
||||
for (encoding_error, row) in rows:
|
||||
|
@ -6,7 +6,7 @@ from twisted.trial import unittest
|
||||
from allmydata.util import fileutil
|
||||
from allmydata.util.encodingutil import listdir_unicode, get_filesystem_encoding, unicode_platform
|
||||
from allmydata.util.assertutil import precondition
|
||||
from allmydata import backupdb
|
||||
from allmydata.scripts import backupdb
|
||||
|
||||
class BackupDB(unittest.TestCase):
|
||||
def create(self, dbfile):
|
||||
|
@ -11,8 +11,7 @@ from allmydata.util import fileutil
|
||||
from allmydata.util.fileutil import abspath_expanduser_unicode
|
||||
from allmydata.util.encodingutil import get_io_encoding, unicode_to_argv
|
||||
from allmydata.util.namespace import Namespace
|
||||
from allmydata.scripts import cli
|
||||
from allmydata import backupdb
|
||||
from allmydata.scripts import cli, backupdb
|
||||
from .common_util import StallMixin
|
||||
from .no_network import GridTestMixin
|
||||
from .test_cli import CLITestMixin, parse_options
|
||||
|
@ -16,7 +16,7 @@ from .test_cli_magic_folder import MagicFolderCLITestMixin
|
||||
|
||||
from allmydata.frontends import magic_folder
|
||||
from allmydata.frontends.magic_folder import MagicFolder, Downloader
|
||||
from allmydata import backupdb, magicpath
|
||||
from allmydata import magicfolderdb, magicpath
|
||||
from allmydata.util.fileutil import abspath_expanduser_unicode
|
||||
|
||||
|
||||
@ -39,10 +39,10 @@ class MagicFolderTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, ReallyEqual
|
||||
|
||||
def _createdb(self):
|
||||
dbfile = abspath_expanduser_unicode(u"magicfolderdb.sqlite", base=self.basedir)
|
||||
bdb = backupdb.get_backupdb(dbfile, create_version=(backupdb.MAGIC_FOLDER_SCHEMA_v3, 3))
|
||||
self.failUnless(bdb, "unable to create backupdb from %r" % (dbfile,))
|
||||
self.failUnlessEqual(bdb.VERSION, 3)
|
||||
return bdb
|
||||
mdb = magicfolderdb.get_magicfolderdb(dbfile, create_version=(magicfolderdb.SCHEMA_v1, 1))
|
||||
self.failUnless(mdb, "unable to create magicfolderdb from %r" % (dbfile,))
|
||||
self.failUnlessEqual(mdb.VERSION, 1)
|
||||
return mdb
|
||||
|
||||
def _restart_client(self, ign):
|
||||
#print "_restart_client"
|
||||
|
Loading…
x
Reference in New Issue
Block a user