Replaced print statements with the print function for all tahoe_* scripts

This commit is contained in:
heartsucker 2019-03-24 14:09:10 +01:00
parent 70c461dfe2
commit fc417826f1
No known key found for this signature in database
GPG Key ID: C49FAAAA25756E79
18 changed files with 163 additions and 141 deletions

1
newsfragments/3009.other Normal file
View File

@ -0,0 +1 @@
Replaced print statements with the print function for all tahoe_* scripts.

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import sys
from collections import namedtuple
@ -82,10 +83,10 @@ def get_magicfolderdb(dbfile, stderr=sys.stderr,
if create_version[1] in (1, 2):
return MagicFolderDB(sqlite3, db)
else:
print >>stderr, "invalid magicfolderdb schema version specified"
print("invalid magicfolderdb schema version specified", file=stderr)
return None
except DBError, e:
print >>stderr, e
print(e, file=stderr)
return None
class LocalPath(object):

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os.path
import codecs
@ -39,22 +40,22 @@ def add_alias(options):
stderr = options.stderr
if u":" in alias:
# a single trailing colon will already have been stripped if present
print >>stderr, "Alias names cannot contain colons."
print("Alias names cannot contain colons.", file=stderr)
return 1
if u" " in alias:
print >>stderr, "Alias names cannot contain spaces."
print("Alias names cannot contain spaces.", file=stderr)
return 1
old_aliases = get_aliases(nodedir)
if alias in old_aliases:
print >>stderr, "Alias %s already exists!" % quote_output(alias)
print("Alias %s already exists!" % quote_output(alias), file=stderr)
return 1
aliasfile = os.path.join(nodedir, "private", "aliases")
cap = uri.from_string_dirnode(cap).to_string()
add_line_to_aliasfile(aliasfile, alias, cap)
print >>stdout, "Alias %s added" % quote_output(alias)
print("Alias %s added" % quote_output(alias), file=stdout)
return 0
def create_alias(options):
@ -66,15 +67,15 @@ def create_alias(options):
stderr = options.stderr
if u":" in alias:
# a single trailing colon will already have been stripped if present
print >>stderr, "Alias names cannot contain colons."
print("Alias names cannot contain colons.", file=stderr)
return 1
if u" " in alias:
print >>stderr, "Alias names cannot contain spaces."
print("Alias names cannot contain spaces.", file=stderr)
return 1
old_aliases = get_aliases(nodedir)
if alias in old_aliases:
print >>stderr, "Alias %s already exists!" % quote_output(alias)
print("Alias %s already exists!" % quote_output(alias), file=stderr)
return 1
aliasfile = os.path.join(nodedir, "private", "aliases")
@ -93,7 +94,7 @@ def create_alias(options):
add_line_to_aliasfile(aliasfile, alias, new_uri)
print >>stdout, "Alias %s created" % (quote_output(alias),)
print("Alias %s created" % (quote_output(alias),), file=stdout)
return 0
@ -124,20 +125,20 @@ def list_aliases(options):
if options['json']:
try:
# XXX why are we presuming utf-8 output?
print >>stdout, json.dumps(data, indent=4).decode('utf-8')
print(json.dumps(data, indent=4).decode('utf-8'), file=stdout)
except (UnicodeEncodeError, UnicodeDecodeError):
print >>stderr, json.dumps(data, indent=4)
print(json.dumps(data, indent=4), file=stderr)
rc = 1
else:
for name, details in data.items():
dircap = details['readonly'] if options['readonly-uri'] else details['readwrite']
try:
print >>stdout, fmt % (unicode_to_output(name), unicode_to_output(dircap.decode('utf-8')))
print(fmt % (unicode_to_output(name), unicode_to_output(dircap.decode('utf-8'))), file=stdout)
except (UnicodeEncodeError, UnicodeDecodeError):
print >>stderr, fmt % (quote_output(name), quote_output(dircap))
print(fmt % (quote_output(name), quote_output(dircap)), file=stderr)
rc = 1
if rc == 1:
print >>stderr, "\nThis listing included aliases or caps that could not be converted to the terminal" \
"\noutput encoding. These are shown using backslash escapes and in quotes."
print("\nThis listing included aliases or caps that could not be converted to the terminal" \
"\noutput encoding. These are shown using backslash escapes and in quotes.", file=stderr)
return rc

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os.path
import time
@ -88,7 +89,7 @@ class BackerUpper(object):
bdbfile = abspath_expanduser_unicode(bdbfile)
self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
if not self.backupdb:
print >>stderr, "ERROR: Unable to load backup db."
print("ERROR: Unable to load backup db.", file=stderr)
return 1
try:
@ -110,7 +111,7 @@ class BackerUpper(object):
if resp.status == 404:
resp = do_http("POST", archives_url + "?t=mkdir")
if resp.status != 200:
print >>stderr, format_http_error("Unable to create target directory", resp)
print(format_http_error("Unable to create target directory", resp), file=stderr)
return 1
# second step: process the tree
@ -134,11 +135,11 @@ class BackerUpper(object):
put_child(archives_url, now, new_backup_dircap)
put_child(to_url, "Latest", new_backup_dircap)
print >>stdout, completed.report(
print(completed.report(
self.verbosity,
self._files_checked,
self._directories_checked,
)
), file=stdout)
# The command exits with code 2 if files or directories were skipped
if completed.any_skips():
@ -150,11 +151,11 @@ class BackerUpper(object):
def verboseprint(self, msg):
precondition(isinstance(msg, str), msg)
if self.verbosity >= 2:
print >>self.options.stdout, msg
print(msg, file=self.options.stdout)
def warn(self, msg):
precondition(isinstance(msg, str), msg)
print >>self.options.stderr, msg
print(msg, file=self.options.stderr)
def upload_directory(self, path, compare_contents, create_contents):
must_create, r = self.check_backupdb_directory(compare_contents)
@ -323,7 +324,7 @@ def run_backup(
# Currently, BackupProgress is mutable, though, and everything just
# mutates it.
progress = target.backup(progress, upload_file, upload_directory)
print >>stdout, progress.report(datetime.datetime.now())
print(progress.report(datetime.datetime.now()), file=stdout)
return progress.backup_finished()

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib
import json
@ -42,7 +43,7 @@ def check_location(options, where):
resp = do_http("POST", url)
if resp.status != 200:
print >>stderr, format_http_error("ERROR", resp)
print(format_http_error("ERROR", resp), file=stderr)
return 1
jdata = resp.read()
if options.get("raw"):
@ -129,12 +130,12 @@ class DeepCheckOutput(LineOnlyReceiver):
def lineReceived(self, line):
if self.in_error:
print >>self.stderr, quote_output(line, quotemarks=False)
print(quote_output(line, quotemarks=False), file=self.stderr)
return
if line.startswith("ERROR:"):
self.in_error = True
self.streamer.rc = 1
print >>self.stderr, quote_output(line, quotemarks=False)
print(quote_output(line, quotemarks=False), file=self.stderr)
return
d = json.loads(line)
@ -144,7 +145,7 @@ class DeepCheckOutput(LineOnlyReceiver):
self.num_objects += 1
# non-verbose means print a progress marker every 100 files
if self.num_objects % 100 == 0:
print >>stdout, "%d objects checked.." % self.num_objects
print("%d objects checked.." % self.num_objects, file=stdout)
cr = d["check-results"]
if cr["results"]["healthy"]:
self.files_healthy += 1
@ -158,19 +159,19 @@ class DeepCheckOutput(LineOnlyReceiver):
# LIT files and directories do not have a "summary" field.
summary = cr.get("summary", "Healthy (LIT)")
print >>stdout, "%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False))
print("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)), file=stdout)
# always print out corrupt shares
for shareloc in cr["results"].get("list-corrupt-shares", []):
(serverid, storage_index, sharenum) = shareloc
print >>stdout, " corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum)
print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum), file=stdout)
def done(self):
if self.in_error:
return
stdout = self.stdout
print >>stdout, "done: %d objects checked, %d healthy, %d unhealthy" \
% (self.num_objects, self.files_healthy, self.files_unhealthy)
print("done: %d objects checked, %d healthy, %d unhealthy" \
% (self.num_objects, self.files_healthy, self.files_unhealthy), file=stdout)
class DeepCheckAndRepairOutput(LineOnlyReceiver):
delimiter = "\n"
@ -192,12 +193,12 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
def lineReceived(self, line):
if self.in_error:
print >>self.stderr, quote_output(line, quotemarks=False)
print(quote_output(line, quotemarks=False), file=self.stderr)
return
if line.startswith("ERROR:"):
self.in_error = True
self.streamer.rc = 1
print >>self.stderr, quote_output(line, quotemarks=False)
print(quote_output(line, quotemarks=False), file=self.stderr)
return
d = json.loads(line)
@ -207,7 +208,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
self.num_objects += 1
# non-verbose means print a progress marker every 100 files
if self.num_objects % 100 == 0:
print >>stdout, "%d objects checked.." % self.num_objects
print("%d objects checked.." % self.num_objects, file=stdout)
crr = d["check-and-repair-results"]
if d["storage-index"]:
if crr["pre-repair-results"]["results"]["healthy"]:
@ -239,36 +240,36 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
summary = "healthy"
else:
summary = "not healthy"
print >>stdout, "%s: %s" % (quote_path(path), summary)
print("%s: %s" % (quote_path(path), summary), file=stdout)
# always print out corrupt shares
prr = crr.get("pre-repair-results", {})
for shareloc in prr.get("results", {}).get("list-corrupt-shares", []):
(serverid, storage_index, sharenum) = shareloc
print >>stdout, " corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum)
print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum), file=stdout)
# always print out repairs
if crr["repair-attempted"]:
if crr["repair-successful"]:
print >>stdout, " repair successful"
print(" repair successful", file=stdout)
else:
print >>stdout, " repair failed"
print(" repair failed", file=stdout)
def done(self):
if self.in_error:
return
stdout = self.stdout
print >>stdout, "done: %d objects checked" % self.num_objects
print >>stdout, " pre-repair: %d healthy, %d unhealthy" \
print("done: %d objects checked" % self.num_objects, file=stdout)
print(" pre-repair: %d healthy, %d unhealthy" \
% (self.pre_repair_files_healthy,
self.pre_repair_files_unhealthy)
print >>stdout, " %d repairs attempted, %d successful, %d failed" \
self.pre_repair_files_unhealthy), file=stdout)
print(" %d repairs attempted, %d successful, %d failed" \
% (self.repairs_attempted,
self.repairs_successful,
(self.repairs_attempted - self.repairs_successful))
print >>stdout, " post-repair: %d healthy, %d unhealthy" \
(self.repairs_attempted - self.repairs_successful)), file=stdout)
print(" post-repair: %d healthy, %d unhealthy" \
% (self.post_repair_files_healthy,
self.post_repair_files_unhealthy)
self.post_repair_files_unhealthy), file=stdout)
class DeepCheckStreamer(LineOnlyReceiver):
@ -305,7 +306,7 @@ class DeepCheckStreamer(LineOnlyReceiver):
url += "&add-lease=true"
resp = do_http("POST", url)
if resp.status not in (200, 302):
print >>stderr, format_http_error("ERROR", resp)
print(format_http_error("ERROR", resp), file=stderr)
return 1
# use Twisted to split this into lines

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os.path
import urllib
@ -480,7 +481,7 @@ class Copier:
self.stderr = options.stderr
if verbosity >= 2 and not self.progressfunc:
def progress(message):
print >>self.stderr, message
print(message, file=self.stderr)
self.progressfunc = progress
self.caps_only = options["caps-only"]
self.cache = {}
@ -490,7 +491,7 @@ class Copier:
except TahoeError, te:
if verbosity >= 2:
Failure().printTraceback(self.stderr)
print >>self.stderr
print(file=self.stderr)
te.display(self.stderr)
return 1
@ -579,7 +580,7 @@ class Copier:
return self.copy_things_to_directory(sources, target)
def to_stderr(self, text):
print >>self.stderr, text
print(text, file=self.stderr)
# FIXME reduce the amount of near-duplicate code between get_target_info
# and get_source_info.
@ -698,7 +699,7 @@ class Copier:
def announce_success(self, msg):
if self.verbosity >= 1:
print >>self.stdout, "Success: %s" % msg
print("Success: %s" % msg, file=self.stdout)
return 0
def copy_file_to_file(self, source, target):

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os, sys
from allmydata.scripts.common import BasedirOptions
@ -181,13 +182,13 @@ def daemonize(config):
err = config.stderr
basedir = config['basedir']
quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "daemonizing in {}".format(quoted_basedir)
print("daemonizing in {}".format(quoted_basedir), file=out)
if not os.path.isdir(basedir):
print >>err, "%s does not look like a directory at all" % quoted_basedir
print("%s does not look like a directory at all" % quoted_basedir, file=err)
return 1
nodetype = identify_node_type(basedir)
if not nodetype:
print >>err, "%s is not a recognizable node directory" % quoted_basedir
print("%s is not a recognizable node directory" % quoted_basedir, file=err)
return 1
# Now prepare to turn into a twistd process. This os.chdir is the point
# of no return.
@ -207,15 +208,15 @@ def daemonize(config):
twistd_config.parseOptions(twistd_args)
except usage.error, ue:
# these arguments were unsuitable for 'twistd'
print >>err, config
print >>err, "tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue)
print(config, file=err)
print("tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue), file=err)
return 1
twistd_config.loadedPlugins = {"DaemonizeTahoeNode": DaemonizeTahoeNodePlugin(nodetype, basedir)}
# handle invalid PID file (twistd might not start otherwise)
pidfile = get_pidfile(basedir)
if get_pid_from_pidfile(pidfile) == -1:
print >>err, "found invalid PID file in %s - deleting it" % basedir
print("found invalid PID file in %s - deleting it" % basedir, file=err)
os.remove(pidfile)
# On Unix-like platforms:
@ -249,7 +250,7 @@ def daemonize(config):
else:
verb = "starting"
print >>out, "%s node in %s" % (verb, quoted_basedir)
print("%s node in %s" % (verb, quoted_basedir), file=out)
twistd.runApp(twistd_config)
# we should only reach here if --nodaemon or equivalent was used
return 0

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
@ -38,7 +39,7 @@ def get(options):
outf.close()
rc = 0
else:
print >>stderr, format_http_error("Error during GET", resp)
print(format_http_error("Error during GET", resp), file=stderr)
rc = 1
return rc

View File

@ -1,3 +1,5 @@
from __future__ import print_function
import json
from os.path import join
@ -34,19 +36,19 @@ def _send_config_via_wormhole(options, config):
out = options.stdout
err = options.stderr
relay_url = options.parent['wormhole-server']
print >>out, "Connecting to '{}'...".format(relay_url)
print("Connecting to '{}'...".format(relay_url), file=out)
wh = wormhole.create(
appid=options.parent['wormhole-invite-appid'],
relay_url=relay_url,
reactor=reactor,
)
yield wh.get_welcome()
print >>out, "Connected to wormhole server"
print("Connected to wormhole server", file=out)
# must call allocate_code before get_code will ever succeed
wh.allocate_code()
code = yield wh.get_code()
print >>out, "Invite Code for client: {}".format(code)
print("Invite Code for client: {}".format(code), file=out)
wh.send_message(json.dumps({
u"abilities": {
@ -55,16 +57,16 @@ def _send_config_via_wormhole(options, config):
}))
client_intro = yield wh.get_message()
print >>out, " received client introduction"
print(" received client introduction", file=out)
client_intro = json.loads(client_intro)
if not u'abilities' in client_intro:
print >>err, "No 'abilities' from client"
print("No 'abilities' from client", file=err)
defer.returnValue(1)
if not u'client-v1' in client_intro[u'abilities']:
print >>err, "No 'client-v1' in abilities from client"
print("No 'client-v1' in abilities from client", file=err)
defer.returnValue(1)
print >>out, " transmitting configuration"
print(" transmitting configuration", file=out)
wh.send_message(json.dumps(config))
yield wh.close()
@ -82,7 +84,7 @@ def invite(options):
try:
introducer_furl = get_introducer_furl(basedir, config)
except Exception as e:
print >>err, "Can't find introducer FURL for node '{}': {}".format(basedir, str(e))
print("Can't find introducer FURL for node '{}': {}".format(basedir, str(e)), file=err)
raise SystemExit(1)
nick = options['nick']
@ -96,7 +98,7 @@ def invite(options):
}
yield _send_config_via_wormhole(options, remote_config)
print >>out, "Completed successfully"
print("Completed successfully", file=out)
subCommands = [

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib, time
import json
@ -30,10 +31,10 @@ def list(options):
url += "?t=json"
resp = do_http("GET", url)
if resp.status == 404:
print >>stderr, "No such file or directory"
print("No such file or directory", file=stderr)
return 2
if resp.status != 200:
print >>stderr, format_http_error("Error during GET", resp)
print(format_http_error("Error during GET", resp), file=stderr)
if resp.status == 0:
return 3
else:
@ -44,19 +45,19 @@ def list(options):
if options['json']:
# The webapi server should always output printable ASCII.
if is_printable_ascii(data):
print >>stdout, data
print(data, file=stdout)
return 0
else:
print >>stderr, "The JSON response contained unprintable characters:"
print >>stderr, quote_output(data, quotemarks=False)
print("The JSON response contained unprintable characters:", file=stderr)
print(quote_output(data, quotemarks=False), file=stderr)
return 1
try:
parsed = json.loads(data)
except Exception, e:
print >>stderr, "error: %s" % quote_output(e.args[0], quotemarks=False)
print >>stderr, "Could not parse JSON response:"
print >>stderr, quote_output(data, quotemarks=False)
print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
print("Could not parse JSON response:", file=stderr)
print(quote_output(data, quotemarks=False), file=stderr)
return 1
nodetype, d = parsed
@ -180,16 +181,16 @@ def list(options):
rc = 0
for (encoding_error, row) in rows:
if encoding_error:
print >>stderr, (fmt % tuple(row)).rstrip()
print((fmt % tuple(row)).rstrip(), file=stderr)
rc = 1
else:
print >>stdout, (fmt % tuple(row)).rstrip()
print((fmt % tuple(row)).rstrip(), file=stdout)
if rc == 1:
print >>stderr, "\nThis listing included files whose names could not be converted to the terminal" \
"\noutput encoding. Their names are shown using backslash escapes and in quotes."
print("\nThis listing included files whose names could not be converted to the terminal" \
"\noutput encoding. Their names are shown using backslash escapes and in quotes.", file=stderr)
if has_unknowns:
print >>stderr, "\nThis listing included unknown objects. Using a webapi server that supports" \
"\na later version of Tahoe may help."
print("\nThis listing included unknown objects. Using a webapi server that supports" \
"\na later version of Tahoe may help.", file=stderr)
return rc

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib, json
from twisted.protocols.basic import LineOnlyReceiver
@ -41,7 +42,7 @@ class ManifestStreamer(LineOnlyReceiver):
url += "?t=stream-manifest"
resp = do_http("POST", url)
if resp.status not in (200, 302):
print >>stderr, format_http_error("ERROR", resp)
print(format_http_error("ERROR", resp), file=stderr)
return 1
#print "RESP", dir(resp)
# use Twisted to split this into lines
@ -60,35 +61,35 @@ class ManifestStreamer(LineOnlyReceiver):
stdout = self.options.stdout
stderr = self.options.stderr
if self.in_error:
print >>stderr, quote_output(line, quotemarks=False)
print(quote_output(line, quotemarks=False), file=stderr)
return
if line.startswith("ERROR:"):
self.in_error = True
self.rc = 1
print >>stderr, quote_output(line, quotemarks=False)
print(quote_output(line, quotemarks=False), file=stderr)
return
try:
d = json.loads(line.decode('utf-8'))
except Exception, e:
print >>stderr, "ERROR could not decode/parse %s\nERROR %r" % (quote_output(line), e)
print("ERROR could not decode/parse %s\nERROR %r" % (quote_output(line), e), file=stderr)
else:
if d["type"] in ("file", "directory"):
if self.options["storage-index"]:
si = d.get("storage-index", None)
if si:
print >>stdout, quote_output(si, quotemarks=False)
print(quote_output(si, quotemarks=False), file=stdout)
elif self.options["verify-cap"]:
vc = d.get("verifycap", None)
if vc:
print >>stdout, quote_output(vc, quotemarks=False)
print(quote_output(vc, quotemarks=False), file=stdout)
elif self.options["repair-cap"]:
vc = d.get("repaircap", None)
if vc:
print >>stdout, quote_output(vc, quotemarks=False)
print(quote_output(vc, quotemarks=False), file=stdout)
else:
print >>stdout, "%s %s" % (quote_output(d["cap"], quotemarks=False),
quote_path(d["path"], quotemarks=False))
print("%s %s" % (quote_output(d["cap"], quotemarks=False),
quote_path(d["path"], quotemarks=False)), file=stdout)
def manifest(options):
return ManifestStreamer().run(options)
@ -113,18 +114,18 @@ class StatsGrabber(SlowOperationRunner):
"largest-immutable-file",
)
width = max([len(k) for k in keys])
print >>stdout, "Counts and Total Sizes:"
print("Counts and Total Sizes:", file=stdout)
for k in keys:
fmt = "%" + str(width) + "s: %d"
if k in data:
value = data[k]
if not k.startswith("count-") and value > 1000:
absize = abbreviate_space_both(value)
print >>stdout, fmt % (k, data[k]), " ", absize
print(fmt % (k, data[k]), " ", absize, file=stdout)
else:
print >>stdout, fmt % (k, data[k])
print(fmt % (k, data[k]), file=stdout)
if data["size-files-histogram"]:
print >>stdout, "Size Histogram:"
print("Size Histogram:", file=stdout)
prevmax = None
maxlen = max([len(str(maxsize))
for (minsize, maxsize, count)
@ -138,10 +139,10 @@ class StatsGrabber(SlowOperationRunner):
linefmt = minfmt + "-" + maxfmt + " : " + countfmt + " %s"
for (minsize, maxsize, count) in data["size-files-histogram"]:
if prevmax is not None and minsize != prevmax+1:
print >>stdout, " "*(maxlen-1) + "..."
print(" "*(maxlen-1) + "...", file=stdout)
prevmax = maxsize
print >>stdout, linefmt % (minsize, maxsize, count,
abbreviate_space_both(maxsize))
print(linefmt % (minsize, maxsize, count,
abbreviate_space_both(maxsize)), file=stdout)
def stats(options):
return StatsGrabber().run(options)

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib
from allmydata.scripts.common_http import do_http, check_http_error
@ -30,7 +31,7 @@ def mkdir(options):
return rc
new_uri = resp.read().strip()
# emit its write-cap
print >>stdout, quote_output(new_uri, quotemarks=False)
print(quote_output(new_uri, quotemarks=False), file=stdout)
return 0
# create a new directory at the given location
@ -45,5 +46,5 @@ def mkdir(options):
resp = do_http("POST", url)
check_http_error(resp, stderr)
new_uri = resp.read().strip()
print >>stdout, quote_output(new_uri, quotemarks=False)
print(quote_output(new_uri, quotemarks=False), file=stdout)
return 0

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import re
import urllib
@ -30,7 +31,7 @@ def mv(options, mode="move"):
# figure out the source cap
resp = do_http("GET", from_url + "?t=json")
if not re.search(r'^2\d\d$', str(resp.status)):
print >>stderr, format_http_error("Error", resp)
print(format_http_error("Error", resp), file=stderr)
return 1
data = resp.read()
nodetype, attrs = json.loads(data)
@ -56,19 +57,19 @@ def mv(options, mode="move"):
status = resp.status
if not re.search(r'^2\d\d$', str(status)):
if status == 409:
print >>stderr, "Error: You can't overwrite a directory with a file"
print("Error: You can't overwrite a directory with a file", file=stderr)
else:
print >>stderr, format_http_error("Error", resp)
print(format_http_error("Error", resp), file=stderr)
if mode == "move":
print >>stderr, "NOT removing the original"
print("NOT removing the original", file=stderr)
return 1
if mode == "move":
# now remove the original
resp = do_http("DELETE", from_url)
if not re.search(r'^2\d\d$', str(resp.status)):
print >>stderr, format_http_error("Error deleting original after move", resp)
print(format_http_error("Error deleting original after move", resp), file=stderr)
return 2
print >>stdout, "OK"
print("OK", file=stdout)
return 0

View File

@ -1,3 +1,4 @@
from __future__ import print_function
from cStringIO import StringIO
import urllib
@ -54,8 +55,8 @@ def put(options):
return 1
if path.startswith("/"):
suggestion = to_file.replace(u"/", u"", 1)
print >>stderr, "Error: The remote filename must not start with a slash"
print >>stderr, "Please try again, perhaps with %s" % quote_output(suggestion)
print("Error: The remote filename must not start with a slash", file=stderr)
print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr)
return 1
url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
if path:
@ -78,16 +79,16 @@ def put(options):
# do_http() can't use stdin directly: for one thing, we need a
# Content-Length field. So we currently must copy it.
if verbosity > 0:
print >>stderr, "waiting for file data on stdin.."
print("waiting for file data on stdin..", file=stderr)
data = stdin.read()
infileobj = StringIO(data)
resp = do_http("PUT", url, infileobj)
if resp.status in (200, 201,):
print >>stderr, format_http_success(resp)
print >>stdout, quote_output(resp.read(), quotemarks=False)
print(format_http_success(resp), file=stderr)
print(quote_output(resp.read(), quotemarks=False), file=stdout)
return 0
print >>stderr, format_http_error("Error", resp)
print(format_http_error("Error", resp), file=stderr)
return 1

View File

@ -1,3 +1,5 @@
from __future__ import print_function
from .tahoe_start import StartOptions, start
from .tahoe_stop import stop, COULD_NOT_STOP
@ -10,9 +12,9 @@ def restart(config):
stderr = config.stderr
rc = stop(config)
if rc == COULD_NOT_STOP:
print >>stderr, "ignoring couldn't-stop"
print("ignoring couldn't-stop", file=stderr)
rc = 0
if rc:
print >>stderr, "not restarting"
print("not restarting", file=stderr)
return rc
return start(config)

View File

@ -1,3 +1,5 @@
from __future__ import print_function
import os
import io
import sys
@ -63,13 +65,13 @@ def start(config):
err = config.stderr
basedir = config['basedir']
quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "STARTING", quoted_basedir
print("STARTING", quoted_basedir, file=out)
if not os.path.isdir(basedir):
print >>err, "%s does not look like a directory at all" % quoted_basedir
print("%s does not look like a directory at all" % quoted_basedir, file=err)
return 1
nodetype = identify_node_type(basedir)
if not nodetype:
print >>err, "%s is not a recognizable node directory" % quoted_basedir
print("%s is not a recognizable node directory" % quoted_basedir, file=err)
return 1
# "tahoe start" attempts to monitor the logs for successful
@ -86,7 +88,7 @@ def start(config):
return 0
if not can_monitor_logs:
print >>out, "Custom logging options; can't monitor logs for proper startup messages"
print("Custom logging options; can't monitor logs for proper startup messages", file=out)
return 1
# before we spawn tahoe, we check if "the log file" exists or not,
@ -130,21 +132,21 @@ def start(config):
collected += f.read()
if magic_string in collected:
if not config.parent['quiet']:
print >>out, "Node has started successfully"
print("Node has started successfully", file=out)
return 0
if 'Traceback ' in collected:
print >>err, "Error starting node; see '{}' for more:\n\n{}".format(
print("Error starting node; see '{}' for more:\n\n{}".format(
log_fname,
collected,
)
), file=err)
return 1
time.sleep(0.1)
print >>out, "Still waiting up to {}s for node startup".format(
print("Still waiting up to {}s for node startup".format(
60 - int(time.time() - overall_start)
)
), file=out)
print >>out, "Something has gone wrong starting the node."
print >>out, "Logs are available in '{}'".format(log_fname)
print >>out, "Collected for this run:"
print >>out, collected
print("Something has gone wrong starting the node.", file=out)
print("Logs are available in '{}'".format(log_fname), file=out)
print("Collected for this run:", file=out)
print(collected, file=out)
return 1

View File

@ -1,3 +1,5 @@
from __future__ import print_function
import os
import time
import signal
@ -23,16 +25,16 @@ def stop(config):
err = config.stderr
basedir = config['basedir']
quoted_basedir = quote_local_unicode_path(basedir)
print >>out, "STOPPING", quoted_basedir
print("STOPPING", quoted_basedir, file=out)
pidfile = get_pidfile(basedir)
pid = get_pid_from_pidfile(pidfile)
if pid is None:
print >>err, "%s does not look like a running node directory (no twistd.pid)" % quoted_basedir
print("%s does not look like a running node directory (no twistd.pid)" % quoted_basedir, file=err)
# we define rc=2 to mean "nothing is running, but it wasn't me who
# stopped it"
return 2
elif pid == -1:
print >>err, "%s contains an invalid PID file" % basedir
print("%s contains an invalid PID file" % basedir, file=err)
# we define rc=2 to mean "nothing is running, but it wasn't me who
# stopped it"
return 2
@ -44,7 +46,7 @@ def stop(config):
os.kill(pid, signal.SIGKILL)
except OSError, oserr:
if oserr.errno == 3:
print oserr.strerror
print(oserr.strerror)
# the process didn't exist, so wipe the pid file
os.remove(pidfile)
return COULD_NOT_STOP
@ -63,20 +65,20 @@ def stop(config):
try:
os.kill(pid, 0)
except OSError:
print >>out, "process %d is dead" % pid
print("process %d is dead" % pid, file=out)
return
wait -= 1
if wait < 0:
if first_time:
print >>err, ("It looks like pid %d is still running "
"after %d seconds" % (pid,
(time.time() - start)))
print >>err, "I will keep watching it until you interrupt me."
print("It looks like pid %d is still running "
"after %d seconds" % (pid,
(time.time() - start)), file=err)
print("I will keep watching it until you interrupt me.", file=err)
wait = 10
first_time = False
else:
print >>err, "pid %d still running after %d seconds" % \
(pid, (time.time() - start))
print("pid %d still running after %d seconds" % \
(pid, (time.time() - start)), file=err)
wait = 10
time.sleep(1)
# control never reaches here: no timeout

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import urllib
from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
@ -22,8 +23,8 @@ def unlink(options, command="unlink"):
e.display(stderr)
return 1
if not path:
print >>stderr, """
'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,)
print("""
'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,), file=stderr)
return 1
url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@ -32,8 +33,8 @@ def unlink(options, command="unlink"):
resp = do_http("DELETE", url)
if resp.status in (200,):
print >>stdout, format_http_success(resp)
print(format_http_success(resp), file=stdout)
return 0
print >>stderr, format_http_error("ERROR", resp)
print(format_http_error("ERROR", resp), file=stderr)
return 1