import os.path
import time
import urllib
import simplejson
import datetime
from allmydata.scripts.common import get_alias, escape_path, DEFAULT_ALIAS
from allmydata.scripts.common_http import do_http
from allmydata.util import time_format
from allmydata.scripts import backupdb
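
# This module implements the "tahoe backup" command: it walks a local
# directory tree, uploads new or changed files to the grid, snapshots the
# tree as an immutable directory under Archives/<timestamp>, and repoints
# the "Latest" link at that snapshot, reusing earlier uploads via the
# backupdb where possible.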

class HTTPError(Exception):
    pass

def raiseHTTPError(msg, resp):
    msg = msg + ": %s %s %s" % (resp.status, resp.reason, resp.read())
    raise HTTPError(msg)

def get_local_metadata(path):
    metadata = {}

    # posix stat(2) metadata, depends on the platform
    os.stat_float_times(True)
    s = os.stat(path)
    metadata["ctime"] = s.st_ctime
    metadata["mtime"] = s.st_mtime

    misc_fields = ("st_mode", "st_ino", "st_dev", "st_uid", "st_gid")
    macos_misc_fields = ("st_rsize", "st_creator", "st_type")
    for field in misc_fields + macos_misc_fields:
        if hasattr(s, field):
            metadata[field] = getattr(s, field)

    # TODO: extended attributes, like on OS-X's HFS+
    return metadata
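
# mkdir() packs the already-uploaded children into the JSON body expected by
# the webapi's t=mkdir-immutable operation. Illustrative shape (not literal
# output):
#   {"file.txt": ["filenode", {"ro_uri": "URI:CHK:...",
#                              "metadata": {"mtime": 1234567890.0}}]}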
def mkdir(contents, options):
    kids = dict([ (childname, (contents[childname][0],
                               {"ro_uri": contents[childname][1],
                                "metadata": contents[childname][2],
                                }))
                  for childname in contents
                  ])
    body = simplejson.dumps(kids).encode("utf-8")
    url = options['node-url'] + "uri?t=mkdir-immutable"
    resp = do_http("POST", url, body)
    if resp.status < 200 or resp.status >= 300:
        raiseHTTPError("error during mkdir", resp)
    dircap = str(resp.read().strip())
    return dircap
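
# put_child() uses the webapi's PUT /uri/<dir>/<child>?t=uri form, which
# attaches an existing cap under the given child name without re-uploading
# any data.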
def put_child(dirurl, childname, childcap):
    assert dirurl[-1] == "/"
    url = dirurl + urllib.quote(childname) + "?t=uri"
    resp = do_http("PUT", url, childcap)
    if resp.status not in (200, 201):
        raiseHTTPError("error during put_child", resp)

class BackupProcessingError(Exception):
    pass

class BackerUpper:
    def __init__(self, options):
        self.options = options
        self.files_uploaded = 0
        self.files_reused = 0
        self.files_checked = 0
        self.directories_created = 0
        self.directories_reused = 0
        self.directories_checked = 0

    def run(self):
        options = self.options
        nodeurl = options['node-url']
        self.verbosity = 1
        if options['quiet']:
            self.verbosity = 0
        if options['verbose']:
            self.verbosity = 2
        stdout = options.stdout
        stderr = options.stderr

        start_timestamp = datetime.datetime.now()
        self.backupdb = None
        bdbfile = os.path.join(options["node-directory"],
                               "private", "backupdb.sqlite")
        bdbfile = os.path.abspath(bdbfile)
        self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
        if not self.backupdb:
            print >>stderr, "ERROR: Unable to load backup db."
            return 1

        rootcap, path = get_alias(options.aliases, options.to_dir, DEFAULT_ALIAS)
        to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
        if path:
            to_url += escape_path(path)
        if not to_url.endswith("/"):
            to_url += "/"

        archives_url = to_url + "Archives/"

        # first step: make sure the target directory exists, as well as the
        # Archives/ subdirectory.
        resp = do_http("GET", archives_url + "?t=json")
        if resp.status == 404:
            resp = do_http("POST", archives_url + "?t=mkdir")
            if resp.status != 200:
                print >>stderr, "Unable to create target directory: %s %s %s" % \
                      (resp.status, resp.reason, resp.read())
                return 1

        # second step: process the tree
        new_backup_dircap = self.process(options.from_dir)

        # third: attach the new backup to the list
        now = time_format.iso_utc(int(time.time()), sep="_") + "Z"
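        # (illustrative) the archive name is an ISO-8601-style UTC timestamp
        # such as "2009-11-26_17:42:57Z", with "_" separating date and time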

        put_child(archives_url, now, new_backup_dircap)
        put_child(to_url, "Latest", new_backup_dircap)
        end_timestamp = datetime.datetime.now()
        # calc elapsed time, omitting microseconds
        elapsed_time = str(end_timestamp - start_timestamp).split('.')[0]

        if self.verbosity >= 1:
            print >>stdout, (" %d files uploaded (%d reused), "
                             "%d directories created (%d reused)"
                             % (self.files_uploaded,
                                self.files_reused,
                                self.directories_created,
                                self.directories_reused))
        if self.verbosity >= 2:
            print >>stdout, (" %d files checked, %d directories checked"
                             % (self.files_checked,
                                self.directories_checked))
        print >>stdout, " backup done, elapsed time: %s" % elapsed_time
        # done!
        return 0

    def verboseprint(self, msg):
        if self.verbosity >= 2:
            print >>self.options.stdout, msg
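
    # process() walks one local directory: it uploads each file child,
    # recurses into each subdirectory, then creates an immutable Tahoe
    # directory for this level (or reuses one recorded in the backupdb)
    # and returns its cap.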
    def process(self, localpath):
        # returns newdircap

        self.verboseprint("processing %s" % localpath)
        create_contents = {} # childname -> (type, rocap, metadata)
        compare_contents = {} # childname -> rocap
        for child in self.options.filter_listdir(os.listdir(localpath)):
            childpath = os.path.join(localpath, child)
            child = unicode(child)
            if os.path.isdir(childpath):
                metadata = get_local_metadata(childpath)
                # recurse on the child directory
                childcap = self.process(childpath)
                assert isinstance(childcap, str)
                create_contents[child] = ("dirnode", childcap, metadata)
                compare_contents[child] = childcap
            elif os.path.isfile(childpath):
                childcap, metadata = self.upload(childpath)
                assert isinstance(childcap, str)
                create_contents[child] = ("filenode", childcap, metadata)
                compare_contents[child] = childcap
            else:
                raise BackupProcessingError("Cannot backup child %r" % childpath)

        must_create, r = self.check_backupdb_directory(compare_contents)
        if must_create:
            self.verboseprint(" creating directory for %s" % localpath)
            newdircap = mkdir(create_contents, self.options)
            assert isinstance(newdircap, str)
            if r:
                r.did_create(newdircap)
            self.directories_created += 1
            return newdircap
        else:
            self.verboseprint(" re-using old directory for %s" % localpath)
            self.directories_reused += 1
            return r.was_created()
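
    # check_backupdb_file() returns (must_upload, results): must_upload is
    # True when the backupdb has no usable record of this file (never
    # uploaded, or the record is stale and the health check fails or reports
    # the file unhealthy); results is the backupdb record, or None when no
    # backupdb is available.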
    def check_backupdb_file(self, childpath):
        if not self.backupdb:
            return True, None
        use_timestamps = not self.options["ignore-timestamps"]
        r = self.backupdb.check_file(childpath, use_timestamps)

        if not r.was_uploaded():
            return True, r

        if not r.should_check():
            # the file was uploaded or checked recently, so we can just use
            # it
            return False, r

        # we must check the file before using the results
        filecap = r.was_uploaded()
        self.verboseprint("checking %s" % filecap)
        nodeurl = self.options['node-url']
        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(filecap)
        self.files_checked += 1
        resp = do_http("POST", checkurl)
        if resp.status != 200:
            # can't check, so we must assume it's bad
            return True, r

        cr = simplejson.loads(resp.read())
        healthy = cr["results"]["healthy"]
        if not healthy:
            # must upload
            return True, r
        # file is healthy, no need to upload
        r.did_check_healthy(cr)
        return False, r
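
    # check_backupdb_directory() is the directory analogue: it returns
    # (must_create, results), keyed on the directory's contents (child names
    # and caps) rather than on a local pathname.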
    def check_backupdb_directory(self, compare_contents):
        if not self.backupdb:
            return True, None
        r = self.backupdb.check_directory(compare_contents)

        if not r.was_created():
            return True, r

        if not r.should_check():
            # the directory was created or checked recently, so we can just
            # use it
            return False, r

        # we must check the directory before re-using it
        dircap = r.was_created()
        self.verboseprint("checking %s" % dircap)
        nodeurl = self.options['node-url']
        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(dircap)
        self.directories_checked += 1
        resp = do_http("POST", checkurl)
        if resp.status != 200:
            # can't check, so we must assume it's bad
            return True, r

        cr = simplejson.loads(resp.read())
        healthy = cr["results"]["healthy"]
        if not healthy:
            # must create
            return True, r
        # directory is healthy, no need to re-create it
        r.did_check_healthy(cr)
        return False, r
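
    # upload() returns (filecap, metadata) for one local file, either by
    # uploading it (PUT /uri) or by reusing the cap recorded in the backupdb
    # when check_backupdb_file() says the existing upload is still good.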
    def upload(self, childpath):
        #self.verboseprint("uploading %s.." % childpath)
        metadata = get_local_metadata(childpath)

        # we can use the backupdb here
        must_upload, bdb_results = self.check_backupdb_file(childpath)

        if must_upload:
            self.verboseprint("uploading %s.." % childpath)
            infileobj = open(os.path.expanduser(childpath), "rb")
            url = self.options['node-url'] + "uri"
            resp = do_http("PUT", url, infileobj)
            if resp.status not in (200, 201):
                raiseHTTPError("Error during file PUT", resp)
            filecap = resp.read().strip()
            self.verboseprint(" %s -> %s" % (childpath, filecap))
            #self.verboseprint(" metadata: %s" % (metadata,))

            if bdb_results:
                bdb_results.did_upload(filecap)

            self.files_uploaded += 1
            return filecap, metadata

        else:
            self.verboseprint("skipping %s.." % childpath)
            self.files_reused += 1
            return bdb_results.was_uploaded(), metadata

def backup(options):
    bu = BackerUpper(options)
    return bu.run()
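
# Typical CLI usage (illustrative):
#   tahoe backup --verbose ~/Documents tahoe:Backups/Documents
# Each run adds a new Archives/<timestamp> snapshot under the target
# directory and repoints Latest at it.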