replaced print statements with the print function for all tahoe_* scripts

This commit is contained in:
heartsucker 2019-03-24 14:09:10 +01:00
parent 70c461dfe2
commit fc417826f1
No known key found for this signature in database
GPG Key ID: C49FAAAA25756E79
18 changed files with 163 additions and 141 deletions

1
newsfragments/3009.other Normal file
View File

@ -0,0 +1 @@
Replaced print statements with the print function for all tahoe_* scripts.

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import sys import sys
from collections import namedtuple from collections import namedtuple
@ -82,10 +83,10 @@ def get_magicfolderdb(dbfile, stderr=sys.stderr,
if create_version[1] in (1, 2): if create_version[1] in (1, 2):
return MagicFolderDB(sqlite3, db) return MagicFolderDB(sqlite3, db)
else: else:
print >>stderr, "invalid magicfolderdb schema version specified" print("invalid magicfolderdb schema version specified", file=stderr)
return None return None
except DBError, e: except DBError, e:
print >>stderr, e print(e, file=stderr)
return None return None
class LocalPath(object): class LocalPath(object):

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os.path import os.path
import codecs import codecs
@ -39,22 +40,22 @@ def add_alias(options):
stderr = options.stderr stderr = options.stderr
if u":" in alias: if u":" in alias:
# a single trailing colon will already have been stripped if present # a single trailing colon will already have been stripped if present
print >>stderr, "Alias names cannot contain colons." print("Alias names cannot contain colons.", file=stderr)
return 1 return 1
if u" " in alias: if u" " in alias:
print >>stderr, "Alias names cannot contain spaces." print("Alias names cannot contain spaces.", file=stderr)
return 1 return 1
old_aliases = get_aliases(nodedir) old_aliases = get_aliases(nodedir)
if alias in old_aliases: if alias in old_aliases:
print >>stderr, "Alias %s already exists!" % quote_output(alias) print("Alias %s already exists!" % quote_output(alias), file=stderr)
return 1 return 1
aliasfile = os.path.join(nodedir, "private", "aliases") aliasfile = os.path.join(nodedir, "private", "aliases")
cap = uri.from_string_dirnode(cap).to_string() cap = uri.from_string_dirnode(cap).to_string()
add_line_to_aliasfile(aliasfile, alias, cap) add_line_to_aliasfile(aliasfile, alias, cap)
print >>stdout, "Alias %s added" % quote_output(alias) print("Alias %s added" % quote_output(alias), file=stdout)
return 0 return 0
def create_alias(options): def create_alias(options):
@ -66,15 +67,15 @@ def create_alias(options):
stderr = options.stderr stderr = options.stderr
if u":" in alias: if u":" in alias:
# a single trailing colon will already have been stripped if present # a single trailing colon will already have been stripped if present
print >>stderr, "Alias names cannot contain colons." print("Alias names cannot contain colons.", file=stderr)
return 1 return 1
if u" " in alias: if u" " in alias:
print >>stderr, "Alias names cannot contain spaces." print("Alias names cannot contain spaces.", file=stderr)
return 1 return 1
old_aliases = get_aliases(nodedir) old_aliases = get_aliases(nodedir)
if alias in old_aliases: if alias in old_aliases:
print >>stderr, "Alias %s already exists!" % quote_output(alias) print("Alias %s already exists!" % quote_output(alias), file=stderr)
return 1 return 1
aliasfile = os.path.join(nodedir, "private", "aliases") aliasfile = os.path.join(nodedir, "private", "aliases")
@ -93,7 +94,7 @@ def create_alias(options):
add_line_to_aliasfile(aliasfile, alias, new_uri) add_line_to_aliasfile(aliasfile, alias, new_uri)
print >>stdout, "Alias %s created" % (quote_output(alias),) print("Alias %s created" % (quote_output(alias),), file=stdout)
return 0 return 0
@ -124,20 +125,20 @@ def list_aliases(options):
if options['json']: if options['json']:
try: try:
# XXX why are we presuming utf-8 output? # XXX why are we presuming utf-8 output?
print >>stdout, json.dumps(data, indent=4).decode('utf-8') print(json.dumps(data, indent=4).decode('utf-8'), file=stdout)
except (UnicodeEncodeError, UnicodeDecodeError): except (UnicodeEncodeError, UnicodeDecodeError):
print >>stderr, json.dumps(data, indent=4) print(json.dumps(data, indent=4), file=stderr)
rc = 1 rc = 1
else: else:
for name, details in data.items(): for name, details in data.items():
dircap = details['readonly'] if options['readonly-uri'] else details['readwrite'] dircap = details['readonly'] if options['readonly-uri'] else details['readwrite']
try: try:
print >>stdout, fmt % (unicode_to_output(name), unicode_to_output(dircap.decode('utf-8'))) print(fmt % (unicode_to_output(name), unicode_to_output(dircap.decode('utf-8'))), file=stdout)
except (UnicodeEncodeError, UnicodeDecodeError): except (UnicodeEncodeError, UnicodeDecodeError):
print >>stderr, fmt % (quote_output(name), quote_output(dircap)) print(fmt % (quote_output(name), quote_output(dircap)), file=stderr)
rc = 1 rc = 1
if rc == 1: if rc == 1:
print >>stderr, "\nThis listing included aliases or caps that could not be converted to the terminal" \ print("\nThis listing included aliases or caps that could not be converted to the terminal" \
"\noutput encoding. These are shown using backslash escapes and in quotes." "\noutput encoding. These are shown using backslash escapes and in quotes.", file=stderr)
return rc return rc

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os.path import os.path
import time import time
@ -88,7 +89,7 @@ class BackerUpper(object):
bdbfile = abspath_expanduser_unicode(bdbfile) bdbfile = abspath_expanduser_unicode(bdbfile)
self.backupdb = backupdb.get_backupdb(bdbfile, stderr) self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
if not self.backupdb: if not self.backupdb:
print >>stderr, "ERROR: Unable to load backup db." print("ERROR: Unable to load backup db.", file=stderr)
return 1 return 1
try: try:
@ -110,7 +111,7 @@ class BackerUpper(object):
if resp.status == 404: if resp.status == 404:
resp = do_http("POST", archives_url + "?t=mkdir") resp = do_http("POST", archives_url + "?t=mkdir")
if resp.status != 200: if resp.status != 200:
print >>stderr, format_http_error("Unable to create target directory", resp) print(format_http_error("Unable to create target directory", resp), file=stderr)
return 1 return 1
# second step: process the tree # second step: process the tree
@ -134,11 +135,11 @@ class BackerUpper(object):
put_child(archives_url, now, new_backup_dircap) put_child(archives_url, now, new_backup_dircap)
put_child(to_url, "Latest", new_backup_dircap) put_child(to_url, "Latest", new_backup_dircap)
print >>stdout, completed.report( print(completed.report(
self.verbosity, self.verbosity,
self._files_checked, self._files_checked,
self._directories_checked, self._directories_checked,
) ), file=stdout)
# The command exits with code 2 if files or directories were skipped # The command exits with code 2 if files or directories were skipped
if completed.any_skips(): if completed.any_skips():
@ -150,11 +151,11 @@ class BackerUpper(object):
def verboseprint(self, msg): def verboseprint(self, msg):
precondition(isinstance(msg, str), msg) precondition(isinstance(msg, str), msg)
if self.verbosity >= 2: if self.verbosity >= 2:
print >>self.options.stdout, msg print(msg, file=self.options.stdout)
def warn(self, msg): def warn(self, msg):
precondition(isinstance(msg, str), msg) precondition(isinstance(msg, str), msg)
print >>self.options.stderr, msg print(msg, file=self.options.stderr)
def upload_directory(self, path, compare_contents, create_contents): def upload_directory(self, path, compare_contents, create_contents):
must_create, r = self.check_backupdb_directory(compare_contents) must_create, r = self.check_backupdb_directory(compare_contents)
@ -323,7 +324,7 @@ def run_backup(
# Currently, BackupProgress is mutable, though, and everything just # Currently, BackupProgress is mutable, though, and everything just
# mutates it. # mutates it.
progress = target.backup(progress, upload_file, upload_directory) progress = target.backup(progress, upload_file, upload_directory)
print >>stdout, progress.report(datetime.datetime.now()) print(progress.report(datetime.datetime.now()), file=stdout)
return progress.backup_finished() return progress.backup_finished()

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib import urllib
import json import json
@ -42,7 +43,7 @@ def check_location(options, where):
resp = do_http("POST", url) resp = do_http("POST", url)
if resp.status != 200: if resp.status != 200:
print >>stderr, format_http_error("ERROR", resp) print(format_http_error("ERROR", resp), file=stderr)
return 1 return 1
jdata = resp.read() jdata = resp.read()
if options.get("raw"): if options.get("raw"):
@ -129,12 +130,12 @@ class DeepCheckOutput(LineOnlyReceiver):
def lineReceived(self, line): def lineReceived(self, line):
if self.in_error: if self.in_error:
print >>self.stderr, quote_output(line, quotemarks=False) print(quote_output(line, quotemarks=False), file=self.stderr)
return return
if line.startswith("ERROR:"): if line.startswith("ERROR:"):
self.in_error = True self.in_error = True
self.streamer.rc = 1 self.streamer.rc = 1
print >>self.stderr, quote_output(line, quotemarks=False) print(quote_output(line, quotemarks=False), file=self.stderr)
return return
d = json.loads(line) d = json.loads(line)
@ -144,7 +145,7 @@ class DeepCheckOutput(LineOnlyReceiver):
self.num_objects += 1 self.num_objects += 1
# non-verbose means print a progress marker every 100 files # non-verbose means print a progress marker every 100 files
if self.num_objects % 100 == 0: if self.num_objects % 100 == 0:
print >>stdout, "%d objects checked.." % self.num_objects print("%d objects checked.." % self.num_objects, file=stdout)
cr = d["check-results"] cr = d["check-results"]
if cr["results"]["healthy"]: if cr["results"]["healthy"]:
self.files_healthy += 1 self.files_healthy += 1
@ -158,19 +159,19 @@ class DeepCheckOutput(LineOnlyReceiver):
# LIT files and directories do not have a "summary" field. # LIT files and directories do not have a "summary" field.
summary = cr.get("summary", "Healthy (LIT)") summary = cr.get("summary", "Healthy (LIT)")
print >>stdout, "%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)) print("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)), file=stdout)
# always print out corrupt shares # always print out corrupt shares
for shareloc in cr["results"].get("list-corrupt-shares", []): for shareloc in cr["results"].get("list-corrupt-shares", []):
(serverid, storage_index, sharenum) = shareloc (serverid, storage_index, sharenum) = shareloc
print >>stdout, " corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum) print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum), file=stdout)
def done(self): def done(self):
if self.in_error: if self.in_error:
return return
stdout = self.stdout stdout = self.stdout
print >>stdout, "done: %d objects checked, %d healthy, %d unhealthy" \ print("done: %d objects checked, %d healthy, %d unhealthy" \
% (self.num_objects, self.files_healthy, self.files_unhealthy) % (self.num_objects, self.files_healthy, self.files_unhealthy), file=stdout)
class DeepCheckAndRepairOutput(LineOnlyReceiver): class DeepCheckAndRepairOutput(LineOnlyReceiver):
delimiter = "\n" delimiter = "\n"
@ -192,12 +193,12 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
def lineReceived(self, line): def lineReceived(self, line):
if self.in_error: if self.in_error:
print >>self.stderr, quote_output(line, quotemarks=False) print(quote_output(line, quotemarks=False), file=self.stderr)
return return
if line.startswith("ERROR:"): if line.startswith("ERROR:"):
self.in_error = True self.in_error = True
self.streamer.rc = 1 self.streamer.rc = 1
print >>self.stderr, quote_output(line, quotemarks=False) print(quote_output(line, quotemarks=False), file=self.stderr)
return return
d = json.loads(line) d = json.loads(line)
@ -207,7 +208,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
self.num_objects += 1 self.num_objects += 1
# non-verbose means print a progress marker every 100 files # non-verbose means print a progress marker every 100 files
if self.num_objects % 100 == 0: if self.num_objects % 100 == 0:
print >>stdout, "%d objects checked.." % self.num_objects print("%d objects checked.." % self.num_objects, file=stdout)
crr = d["check-and-repair-results"] crr = d["check-and-repair-results"]
if d["storage-index"]: if d["storage-index"]:
if crr["pre-repair-results"]["results"]["healthy"]: if crr["pre-repair-results"]["results"]["healthy"]:
@ -239,36 +240,36 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
summary = "healthy" summary = "healthy"
else: else:
summary = "not healthy" summary = "not healthy"
print >>stdout, "%s: %s" % (quote_path(path), summary) print("%s: %s" % (quote_path(path), summary), file=stdout)
# always print out corrupt shares # always print out corrupt shares
prr = crr.get("pre-repair-results", {}) prr = crr.get("pre-repair-results", {})
for shareloc in prr.get("results", {}).get("list-corrupt-shares", []): for shareloc in prr.get("results", {}).get("list-corrupt-shares", []):
(serverid, storage_index, sharenum) = shareloc (serverid, storage_index, sharenum) = shareloc
print >>stdout, " corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum) print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum), file=stdout)
# always print out repairs # always print out repairs
if crr["repair-attempted"]: if crr["repair-attempted"]:
if crr["repair-successful"]: if crr["repair-successful"]:
print >>stdout, " repair successful" print(" repair successful", file=stdout)
else: else:
print >>stdout, " repair failed" print(" repair failed", file=stdout)
def done(self): def done(self):
if self.in_error: if self.in_error:
return return
stdout = self.stdout stdout = self.stdout
print >>stdout, "done: %d objects checked" % self.num_objects print("done: %d objects checked" % self.num_objects, file=stdout)
print >>stdout, " pre-repair: %d healthy, %d unhealthy" \ print(" pre-repair: %d healthy, %d unhealthy" \
% (self.pre_repair_files_healthy, % (self.pre_repair_files_healthy,
self.pre_repair_files_unhealthy) self.pre_repair_files_unhealthy), file=stdout)
print >>stdout, " %d repairs attempted, %d successful, %d failed" \ print(" %d repairs attempted, %d successful, %d failed" \
% (self.repairs_attempted, % (self.repairs_attempted,
self.repairs_successful, self.repairs_successful,
(self.repairs_attempted - self.repairs_successful)) (self.repairs_attempted - self.repairs_successful)), file=stdout)
print >>stdout, " post-repair: %d healthy, %d unhealthy" \ print(" post-repair: %d healthy, %d unhealthy" \
% (self.post_repair_files_healthy, % (self.post_repair_files_healthy,
self.post_repair_files_unhealthy) self.post_repair_files_unhealthy), file=stdout)
class DeepCheckStreamer(LineOnlyReceiver): class DeepCheckStreamer(LineOnlyReceiver):
@ -305,7 +306,7 @@ class DeepCheckStreamer(LineOnlyReceiver):
url += "&add-lease=true" url += "&add-lease=true"
resp = do_http("POST", url) resp = do_http("POST", url)
if resp.status not in (200, 302): if resp.status not in (200, 302):
print >>stderr, format_http_error("ERROR", resp) print(format_http_error("ERROR", resp), file=stderr)
return 1 return 1
# use Twisted to split this into lines # use Twisted to split this into lines

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os.path import os.path
import urllib import urllib
@ -480,7 +481,7 @@ class Copier:
self.stderr = options.stderr self.stderr = options.stderr
if verbosity >= 2 and not self.progressfunc: if verbosity >= 2 and not self.progressfunc:
def progress(message): def progress(message):
print >>self.stderr, message print(message, file=self.stderr)
self.progressfunc = progress self.progressfunc = progress
self.caps_only = options["caps-only"] self.caps_only = options["caps-only"]
self.cache = {} self.cache = {}
@ -490,7 +491,7 @@ class Copier:
except TahoeError, te: except TahoeError, te:
if verbosity >= 2: if verbosity >= 2:
Failure().printTraceback(self.stderr) Failure().printTraceback(self.stderr)
print >>self.stderr print(file=self.stderr)
te.display(self.stderr) te.display(self.stderr)
return 1 return 1
@ -579,7 +580,7 @@ class Copier:
return self.copy_things_to_directory(sources, target) return self.copy_things_to_directory(sources, target)
def to_stderr(self, text): def to_stderr(self, text):
print >>self.stderr, text print(text, file=self.stderr)
# FIXME reduce the amount of near-duplicate code between get_target_info # FIXME reduce the amount of near-duplicate code between get_target_info
# and get_source_info. # and get_source_info.
@ -698,7 +699,7 @@ class Copier:
def announce_success(self, msg): def announce_success(self, msg):
if self.verbosity >= 1: if self.verbosity >= 1:
print >>self.stdout, "Success: %s" % msg print("Success: %s" % msg, file=self.stdout)
return 0 return 0
def copy_file_to_file(self, source, target): def copy_file_to_file(self, source, target):

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os, sys import os, sys
from allmydata.scripts.common import BasedirOptions from allmydata.scripts.common import BasedirOptions
@ -181,13 +182,13 @@ def daemonize(config):
err = config.stderr err = config.stderr
basedir = config['basedir'] basedir = config['basedir']
quoted_basedir = quote_local_unicode_path(basedir) quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "daemonizing in {}".format(quoted_basedir) print("daemonizing in {}".format(quoted_basedir), file=out)
if not os.path.isdir(basedir): if not os.path.isdir(basedir):
print >>err, "%s does not look like a directory at all" % quoted_basedir print("%s does not look like a directory at all" % quoted_basedir, file=err)
return 1 return 1
nodetype = identify_node_type(basedir) nodetype = identify_node_type(basedir)
if not nodetype: if not nodetype:
print >>err, "%s is not a recognizable node directory" % quoted_basedir print("%s is not a recognizable node directory" % quoted_basedir, file=err)
return 1 return 1
# Now prepare to turn into a twistd process. This os.chdir is the point # Now prepare to turn into a twistd process. This os.chdir is the point
# of no return. # of no return.
@ -207,15 +208,15 @@ def daemonize(config):
twistd_config.parseOptions(twistd_args) twistd_config.parseOptions(twistd_args)
except usage.error, ue: except usage.error, ue:
# these arguments were unsuitable for 'twistd' # these arguments were unsuitable for 'twistd'
print >>err, config print(config, file=err)
print >>err, "tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue) print("tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue), file=err)
return 1 return 1
twistd_config.loadedPlugins = {"DaemonizeTahoeNode": DaemonizeTahoeNodePlugin(nodetype, basedir)} twistd_config.loadedPlugins = {"DaemonizeTahoeNode": DaemonizeTahoeNodePlugin(nodetype, basedir)}
# handle invalid PID file (twistd might not start otherwise) # handle invalid PID file (twistd might not start otherwise)
pidfile = get_pidfile(basedir) pidfile = get_pidfile(basedir)
if get_pid_from_pidfile(pidfile) == -1: if get_pid_from_pidfile(pidfile) == -1:
print >>err, "found invalid PID file in %s - deleting it" % basedir print("found invalid PID file in %s - deleting it" % basedir, file=err)
os.remove(pidfile) os.remove(pidfile)
# On Unix-like platforms: # On Unix-like platforms:
@ -249,7 +250,7 @@ def daemonize(config):
else: else:
verb = "starting" verb = "starting"
print >>out, "%s node in %s" % (verb, quoted_basedir) print("%s node in %s" % (verb, quoted_basedir), file=out)
twistd.runApp(twistd_config) twistd.runApp(twistd_config)
# we should only reach here if --nodaemon or equivalent was used # we should only reach here if --nodaemon or equivalent was used
return 0 return 0

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib import urllib
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
@ -38,7 +39,7 @@ def get(options):
outf.close() outf.close()
rc = 0 rc = 0
else: else:
print >>stderr, format_http_error("Error during GET", resp) print(format_http_error("Error during GET", resp), file=stderr)
rc = 1 rc = 1
return rc return rc

View File

@ -1,3 +1,5 @@
from __future__ import print_function
import json import json
from os.path import join from os.path import join
@ -34,19 +36,19 @@ def _send_config_via_wormhole(options, config):
out = options.stdout out = options.stdout
err = options.stderr err = options.stderr
relay_url = options.parent['wormhole-server'] relay_url = options.parent['wormhole-server']
print >>out, "Connecting to '{}'...".format(relay_url) print("Connecting to '{}'...".format(relay_url), file=out)
wh = wormhole.create( wh = wormhole.create(
appid=options.parent['wormhole-invite-appid'], appid=options.parent['wormhole-invite-appid'],
relay_url=relay_url, relay_url=relay_url,
reactor=reactor, reactor=reactor,
) )
yield wh.get_welcome() yield wh.get_welcome()
print >>out, "Connected to wormhole server" print("Connected to wormhole server", file=out)
# must call allocate_code before get_code will ever succeed # must call allocate_code before get_code will ever succeed
wh.allocate_code() wh.allocate_code()
code = yield wh.get_code() code = yield wh.get_code()
print >>out, "Invite Code for client: {}".format(code) print("Invite Code for client: {}".format(code), file=out)
wh.send_message(json.dumps({ wh.send_message(json.dumps({
u"abilities": { u"abilities": {
@ -55,16 +57,16 @@ def _send_config_via_wormhole(options, config):
})) }))
client_intro = yield wh.get_message() client_intro = yield wh.get_message()
print >>out, " received client introduction" print(" received client introduction", file=out)
client_intro = json.loads(client_intro) client_intro = json.loads(client_intro)
if not u'abilities' in client_intro: if not u'abilities' in client_intro:
print >>err, "No 'abilities' from client" print("No 'abilities' from client", file=err)
defer.returnValue(1) defer.returnValue(1)
if not u'client-v1' in client_intro[u'abilities']: if not u'client-v1' in client_intro[u'abilities']:
print >>err, "No 'client-v1' in abilities from client" print("No 'client-v1' in abilities from client", file=err)
defer.returnValue(1) defer.returnValue(1)
print >>out, " transmitting configuration" print(" transmitting configuration", file=out)
wh.send_message(json.dumps(config)) wh.send_message(json.dumps(config))
yield wh.close() yield wh.close()
@ -82,7 +84,7 @@ def invite(options):
try: try:
introducer_furl = get_introducer_furl(basedir, config) introducer_furl = get_introducer_furl(basedir, config)
except Exception as e: except Exception as e:
print >>err, "Can't find introducer FURL for node '{}': {}".format(basedir, str(e)) print("Can't find introducer FURL for node '{}': {}".format(basedir, str(e)), file=err)
raise SystemExit(1) raise SystemExit(1)
nick = options['nick'] nick = options['nick']
@ -96,7 +98,7 @@ def invite(options):
} }
yield _send_config_via_wormhole(options, remote_config) yield _send_config_via_wormhole(options, remote_config)
print >>out, "Completed successfully" print("Completed successfully", file=out)
subCommands = [ subCommands = [

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib, time import urllib, time
import json import json
@ -30,10 +31,10 @@ def list(options):
url += "?t=json" url += "?t=json"
resp = do_http("GET", url) resp = do_http("GET", url)
if resp.status == 404: if resp.status == 404:
print >>stderr, "No such file or directory" print("No such file or directory", file=stderr)
return 2 return 2
if resp.status != 200: if resp.status != 200:
print >>stderr, format_http_error("Error during GET", resp) print(format_http_error("Error during GET", resp), file=stderr)
if resp.status == 0: if resp.status == 0:
return 3 return 3
else: else:
@ -44,19 +45,19 @@ def list(options):
if options['json']: if options['json']:
# The webapi server should always output printable ASCII. # The webapi server should always output printable ASCII.
if is_printable_ascii(data): if is_printable_ascii(data):
print >>stdout, data print(data, file=stdout)
return 0 return 0
else: else:
print >>stderr, "The JSON response contained unprintable characters:" print("The JSON response contained unprintable characters:", file=stderr)
print >>stderr, quote_output(data, quotemarks=False) print(quote_output(data, quotemarks=False), file=stderr)
return 1 return 1
try: try:
parsed = json.loads(data) parsed = json.loads(data)
except Exception, e: except Exception, e:
print >>stderr, "error: %s" % quote_output(e.args[0], quotemarks=False) print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
print >>stderr, "Could not parse JSON response:" print("Could not parse JSON response:", file=stderr)
print >>stderr, quote_output(data, quotemarks=False) print(quote_output(data, quotemarks=False), file=stderr)
return 1 return 1
nodetype, d = parsed nodetype, d = parsed
@ -180,16 +181,16 @@ def list(options):
rc = 0 rc = 0
for (encoding_error, row) in rows: for (encoding_error, row) in rows:
if encoding_error: if encoding_error:
print >>stderr, (fmt % tuple(row)).rstrip() print((fmt % tuple(row)).rstrip(), file=stderr)
rc = 1 rc = 1
else: else:
print >>stdout, (fmt % tuple(row)).rstrip() print((fmt % tuple(row)).rstrip(), file=stdout)
if rc == 1: if rc == 1:
print >>stderr, "\nThis listing included files whose names could not be converted to the terminal" \ print("\nThis listing included files whose names could not be converted to the terminal" \
"\noutput encoding. Their names are shown using backslash escapes and in quotes." "\noutput encoding. Their names are shown using backslash escapes and in quotes.", file=stderr)
if has_unknowns: if has_unknowns:
print >>stderr, "\nThis listing included unknown objects. Using a webapi server that supports" \ print("\nThis listing included unknown objects. Using a webapi server that supports" \
"\na later version of Tahoe may help." "\na later version of Tahoe may help.", file=stderr)
return rc return rc

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib, json import urllib, json
from twisted.protocols.basic import LineOnlyReceiver from twisted.protocols.basic import LineOnlyReceiver
@ -41,7 +42,7 @@ class ManifestStreamer(LineOnlyReceiver):
url += "?t=stream-manifest" url += "?t=stream-manifest"
resp = do_http("POST", url) resp = do_http("POST", url)
if resp.status not in (200, 302): if resp.status not in (200, 302):
print >>stderr, format_http_error("ERROR", resp) print(format_http_error("ERROR", resp), file=stderr)
return 1 return 1
#print "RESP", dir(resp) #print "RESP", dir(resp)
# use Twisted to split this into lines # use Twisted to split this into lines
@ -60,35 +61,35 @@ class ManifestStreamer(LineOnlyReceiver):
stdout = self.options.stdout stdout = self.options.stdout
stderr = self.options.stderr stderr = self.options.stderr
if self.in_error: if self.in_error:
print >>stderr, quote_output(line, quotemarks=False) print(quote_output(line, quotemarks=False), file=stderr)
return return
if line.startswith("ERROR:"): if line.startswith("ERROR:"):
self.in_error = True self.in_error = True
self.rc = 1 self.rc = 1
print >>stderr, quote_output(line, quotemarks=False) print(quote_output(line, quotemarks=False), file=stderr)
return return
try: try:
d = json.loads(line.decode('utf-8')) d = json.loads(line.decode('utf-8'))
except Exception, e: except Exception, e:
print >>stderr, "ERROR could not decode/parse %s\nERROR %r" % (quote_output(line), e) print("ERROR could not decode/parse %s\nERROR %r" % (quote_output(line), e), file=stderr)
else: else:
if d["type"] in ("file", "directory"): if d["type"] in ("file", "directory"):
if self.options["storage-index"]: if self.options["storage-index"]:
si = d.get("storage-index", None) si = d.get("storage-index", None)
if si: if si:
print >>stdout, quote_output(si, quotemarks=False) print(quote_output(si, quotemarks=False), file=stdout)
elif self.options["verify-cap"]: elif self.options["verify-cap"]:
vc = d.get("verifycap", None) vc = d.get("verifycap", None)
if vc: if vc:
print >>stdout, quote_output(vc, quotemarks=False) print(quote_output(vc, quotemarks=False), file=stdout)
elif self.options["repair-cap"]: elif self.options["repair-cap"]:
vc = d.get("repaircap", None) vc = d.get("repaircap", None)
if vc: if vc:
print >>stdout, quote_output(vc, quotemarks=False) print(quote_output(vc, quotemarks=False), file=stdout)
else: else:
print >>stdout, "%s %s" % (quote_output(d["cap"], quotemarks=False), print("%s %s" % (quote_output(d["cap"], quotemarks=False),
quote_path(d["path"], quotemarks=False)) quote_path(d["path"], quotemarks=False)), file=stdout)
def manifest(options): def manifest(options):
return ManifestStreamer().run(options) return ManifestStreamer().run(options)
@ -113,18 +114,18 @@ class StatsGrabber(SlowOperationRunner):
"largest-immutable-file", "largest-immutable-file",
) )
width = max([len(k) for k in keys]) width = max([len(k) for k in keys])
print >>stdout, "Counts and Total Sizes:" print("Counts and Total Sizes:", file=stdout)
for k in keys: for k in keys:
fmt = "%" + str(width) + "s: %d" fmt = "%" + str(width) + "s: %d"
if k in data: if k in data:
value = data[k] value = data[k]
if not k.startswith("count-") and value > 1000: if not k.startswith("count-") and value > 1000:
absize = abbreviate_space_both(value) absize = abbreviate_space_both(value)
print >>stdout, fmt % (k, data[k]), " ", absize print(fmt % (k, data[k]), " ", absize, file=stdout)
else: else:
print >>stdout, fmt % (k, data[k]) print(fmt % (k, data[k]), file=stdout)
if data["size-files-histogram"]: if data["size-files-histogram"]:
print >>stdout, "Size Histogram:" print("Size Histogram:", file=stdout)
prevmax = None prevmax = None
maxlen = max([len(str(maxsize)) maxlen = max([len(str(maxsize))
for (minsize, maxsize, count) for (minsize, maxsize, count)
@ -138,10 +139,10 @@ class StatsGrabber(SlowOperationRunner):
linefmt = minfmt + "-" + maxfmt + " : " + countfmt + " %s" linefmt = minfmt + "-" + maxfmt + " : " + countfmt + " %s"
for (minsize, maxsize, count) in data["size-files-histogram"]: for (minsize, maxsize, count) in data["size-files-histogram"]:
if prevmax is not None and minsize != prevmax+1: if prevmax is not None and minsize != prevmax+1:
print >>stdout, " "*(maxlen-1) + "..." print(" "*(maxlen-1) + "...", file=stdout)
prevmax = maxsize prevmax = maxsize
print >>stdout, linefmt % (minsize, maxsize, count, print(linefmt % (minsize, maxsize, count,
abbreviate_space_both(maxsize)) abbreviate_space_both(maxsize)), file=stdout)
def stats(options): def stats(options):
return StatsGrabber().run(options) return StatsGrabber().run(options)

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib import urllib
from allmydata.scripts.common_http import do_http, check_http_error from allmydata.scripts.common_http import do_http, check_http_error
@ -30,7 +31,7 @@ def mkdir(options):
return rc return rc
new_uri = resp.read().strip() new_uri = resp.read().strip()
# emit its write-cap # emit its write-cap
print >>stdout, quote_output(new_uri, quotemarks=False) print(quote_output(new_uri, quotemarks=False), file=stdout)
return 0 return 0
# create a new directory at the given location # create a new directory at the given location
@ -45,5 +46,5 @@ def mkdir(options):
resp = do_http("POST", url) resp = do_http("POST", url)
check_http_error(resp, stderr) check_http_error(resp, stderr)
new_uri = resp.read().strip() new_uri = resp.read().strip()
print >>stdout, quote_output(new_uri, quotemarks=False) print(quote_output(new_uri, quotemarks=False), file=stdout)
return 0 return 0

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import re import re
import urllib import urllib
@ -30,7 +31,7 @@ def mv(options, mode="move"):
# figure out the source cap # figure out the source cap
resp = do_http("GET", from_url + "?t=json") resp = do_http("GET", from_url + "?t=json")
if not re.search(r'^2\d\d$', str(resp.status)): if not re.search(r'^2\d\d$', str(resp.status)):
print >>stderr, format_http_error("Error", resp) print(format_http_error("Error", resp), file=stderr)
return 1 return 1
data = resp.read() data = resp.read()
nodetype, attrs = json.loads(data) nodetype, attrs = json.loads(data)
@ -56,19 +57,19 @@ def mv(options, mode="move"):
status = resp.status status = resp.status
if not re.search(r'^2\d\d$', str(status)): if not re.search(r'^2\d\d$', str(status)):
if status == 409: if status == 409:
print >>stderr, "Error: You can't overwrite a directory with a file" print("Error: You can't overwrite a directory with a file", file=stderr)
else: else:
print >>stderr, format_http_error("Error", resp) print(format_http_error("Error", resp), file=stderr)
if mode == "move": if mode == "move":
print >>stderr, "NOT removing the original" print("NOT removing the original", file=stderr)
return 1 return 1
if mode == "move": if mode == "move":
# now remove the original # now remove the original
resp = do_http("DELETE", from_url) resp = do_http("DELETE", from_url)
if not re.search(r'^2\d\d$', str(resp.status)): if not re.search(r'^2\d\d$', str(resp.status)):
print >>stderr, format_http_error("Error deleting original after move", resp) print(format_http_error("Error deleting original after move", resp), file=stderr)
return 2 return 2
print >>stdout, "OK" print("OK", file=stdout)
return 0 return 0

View File

@ -1,3 +1,4 @@
from __future__ import print_function
from cStringIO import StringIO from cStringIO import StringIO
import urllib import urllib
@ -54,8 +55,8 @@ def put(options):
return 1 return 1
if path.startswith("/"): if path.startswith("/"):
suggestion = to_file.replace(u"/", u"", 1) suggestion = to_file.replace(u"/", u"", 1)
print >>stderr, "Error: The remote filename must not start with a slash" print("Error: The remote filename must not start with a slash", file=stderr)
print >>stderr, "Please try again, perhaps with %s" % quote_output(suggestion) print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr)
return 1 return 1
url = nodeurl + "uri/%s/" % urllib.quote(rootcap) url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
if path: if path:
@ -78,16 +79,16 @@ def put(options):
# do_http() can't use stdin directly: for one thing, we need a # do_http() can't use stdin directly: for one thing, we need a
# Content-Length field. So we currently must copy it. # Content-Length field. So we currently must copy it.
if verbosity > 0: if verbosity > 0:
print >>stderr, "waiting for file data on stdin.." print("waiting for file data on stdin..", file=stderr)
data = stdin.read() data = stdin.read()
infileobj = StringIO(data) infileobj = StringIO(data)
resp = do_http("PUT", url, infileobj) resp = do_http("PUT", url, infileobj)
if resp.status in (200, 201,): if resp.status in (200, 201,):
print >>stderr, format_http_success(resp) print(format_http_success(resp), file=stderr)
print >>stdout, quote_output(resp.read(), quotemarks=False) print(quote_output(resp.read(), quotemarks=False), file=stdout)
return 0 return 0
print >>stderr, format_http_error("Error", resp) print(format_http_error("Error", resp), file=stderr)
return 1 return 1

View File

@ -1,3 +1,5 @@
from __future__ import print_function
from .tahoe_start import StartOptions, start from .tahoe_start import StartOptions, start
from .tahoe_stop import stop, COULD_NOT_STOP from .tahoe_stop import stop, COULD_NOT_STOP
@ -10,9 +12,9 @@ def restart(config):
stderr = config.stderr stderr = config.stderr
rc = stop(config) rc = stop(config)
if rc == COULD_NOT_STOP: if rc == COULD_NOT_STOP:
print >>stderr, "ignoring couldn't-stop" print("ignoring couldn't-stop", file=stderr)
rc = 0 rc = 0
if rc: if rc:
print >>stderr, "not restarting" print("not restarting", file=stderr)
return rc return rc
return start(config) return start(config)

View File

@ -1,3 +1,5 @@
from __future__ import print_function
import os import os
import io import io
import sys import sys
@ -63,13 +65,13 @@ def start(config):
err = config.stderr err = config.stderr
basedir = config['basedir'] basedir = config['basedir']
quoted_basedir = quote_local_unicode_path(basedir) quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "STARTING", quoted_basedir print("STARTING", quoted_basedir, file=out)
if not os.path.isdir(basedir): if not os.path.isdir(basedir):
print >>err, "%s does not look like a directory at all" % quoted_basedir print("%s does not look like a directory at all" % quoted_basedir, file=err)
return 1 return 1
nodetype = identify_node_type(basedir) nodetype = identify_node_type(basedir)
if not nodetype: if not nodetype:
print >>err, "%s is not a recognizable node directory" % quoted_basedir print("%s is not a recognizable node directory" % quoted_basedir, file=err)
return 1 return 1
# "tahoe start" attempts to monitor the logs for successful # "tahoe start" attempts to monitor the logs for successful
@ -86,7 +88,7 @@ def start(config):
return 0 return 0
if not can_monitor_logs: if not can_monitor_logs:
print >>out, "Custom logging options; can't monitor logs for proper startup messages" print("Custom logging options; can't monitor logs for proper startup messages", file=out)
return 1 return 1
# before we spawn tahoe, we check if "the log file" exists or not, # before we spawn tahoe, we check if "the log file" exists or not,
@ -130,21 +132,21 @@ def start(config):
collected += f.read() collected += f.read()
if magic_string in collected: if magic_string in collected:
if not config.parent['quiet']: if not config.parent['quiet']:
print >>out, "Node has started successfully" print("Node has started successfully", file=out)
return 0 return 0
if 'Traceback ' in collected: if 'Traceback ' in collected:
print >>err, "Error starting node; see '{}' for more:\n\n{}".format( print("Error starting node; see '{}' for more:\n\n{}".format(
log_fname, log_fname,
collected, collected,
) ), file=err)
return 1 return 1
time.sleep(0.1) time.sleep(0.1)
print >>out, "Still waiting up to {}s for node startup".format( print("Still waiting up to {}s for node startup".format(
60 - int(time.time() - overall_start) 60 - int(time.time() - overall_start)
) ), file=out)
print >>out, "Something has gone wrong starting the node." print("Something has gone wrong starting the node.", file=out)
print >>out, "Logs are available in '{}'".format(log_fname) print("Logs are available in '{}'".format(log_fname), file=out)
print >>out, "Collected for this run:" print("Collected for this run:", file=out)
print >>out, collected print(collected, file=out)
return 1 return 1

View File

@ -1,3 +1,5 @@
from __future__ import print_function
import os import os
import time import time
import signal import signal
@ -23,16 +25,16 @@ def stop(config):
err = config.stderr err = config.stderr
basedir = config['basedir'] basedir = config['basedir']
quoted_basedir = quote_local_unicode_path(basedir) quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "STOPPING", quoted_basedir print("STOPPING", quoted_basedir, file=out)
pidfile = get_pidfile(basedir) pidfile = get_pidfile(basedir)
pid = get_pid_from_pidfile(pidfile) pid = get_pid_from_pidfile(pidfile)
if pid is None: if pid is None:
print >>err, "%s does not look like a running node directory (no twistd.pid)" % quoted_basedir print("%s does not look like a running node directory (no twistd.pid)" % quoted_basedir, file=err)
# we define rc=2 to mean "nothing is running, but it wasn't me who # we define rc=2 to mean "nothing is running, but it wasn't me who
# stopped it" # stopped it"
return 2 return 2
elif pid == -1: elif pid == -1:
print >>err, "%s contains an invalid PID file" % basedir print("%s contains an invalid PID file" % basedir, file=err)
# we define rc=2 to mean "nothing is running, but it wasn't me who # we define rc=2 to mean "nothing is running, but it wasn't me who
# stopped it" # stopped it"
return 2 return 2
@ -44,7 +46,7 @@ def stop(config):
os.kill(pid, signal.SIGKILL) os.kill(pid, signal.SIGKILL)
except OSError, oserr: except OSError, oserr:
if oserr.errno == 3: if oserr.errno == 3:
print oserr.strerror print(oserr.strerror)
# the process didn't exist, so wipe the pid file # the process didn't exist, so wipe the pid file
os.remove(pidfile) os.remove(pidfile)
return COULD_NOT_STOP return COULD_NOT_STOP
@ -63,20 +65,20 @@ def stop(config):
try: try:
os.kill(pid, 0) os.kill(pid, 0)
except OSError: except OSError:
print >>out, "process %d is dead" % pid print("process %d is dead" % pid, file=out)
return return
wait -= 1 wait -= 1
if wait < 0: if wait < 0:
if first_time: if first_time:
print >>err, ("It looks like pid %d is still running " print("It looks like pid %d is still running "
"after %d seconds" % (pid, "after %d seconds" % (pid,
(time.time() - start))) (time.time() - start)), file=err)
print >>err, "I will keep watching it until you interrupt me." print("I will keep watching it until you interrupt me.", file=err)
wait = 10 wait = 10
first_time = False first_time = False
else: else:
print >>err, "pid %d still running after %d seconds" % \ print("pid %d still running after %d seconds" % \
(pid, (time.time() - start)) (pid, (time.time() - start)), file=err)
wait = 10 wait = 10
time.sleep(1) time.sleep(1)
# control never reaches here: no timeout # control never reaches here: no timeout

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib import urllib
from allmydata.scripts.common_http import do_http, format_http_success, format_http_error from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
@ -22,8 +23,8 @@ def unlink(options, command="unlink"):
e.display(stderr) e.display(stderr)
return 1 return 1
if not path: if not path:
print >>stderr, """ print("""
'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,) 'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,), file=stderr)
return 1 return 1
url = nodeurl + "uri/%s" % urllib.quote(rootcap) url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@ -32,8 +33,8 @@ def unlink(options, command="unlink"):
resp = do_http("DELETE", url) resp = do_http("DELETE", url)
if resp.status in (200,): if resp.status in (200,):
print >>stdout, format_http_success(resp) print(format_http_success(resp), file=stdout)
return 0 return 0
print >>stderr, format_http_error("ERROR", resp) print(format_http_error("ERROR", resp), file=stderr)
return 1 return 1