2009-01-15 03:00:15 +00:00
|
|
|
import datetime, os.path, re, types, ConfigParser, tempfile
|
2007-08-12 17:29:38 +00:00
|
|
|
from base64 import b32decode, b32encode
|
2007-05-22 21:01:40 +00:00
|
|
|
|
2008-09-20 17:35:45 +00:00
|
|
|
from twisted.python import log as twlog
|
2007-03-08 22:10:36 +00:00
|
|
|
from twisted.application import service
|
2016-04-27 04:54:45 +00:00
|
|
|
from foolscap.api import Tub, app_versions
|
2008-07-07 06:49:08 +00:00
|
|
|
import foolscap.logging.log
|
2008-09-23 00:03:51 +00:00
|
|
|
from allmydata import get_package_versions, get_package_versions_string
|
2008-07-03 00:40:29 +00:00
|
|
|
from allmydata.util import log
|
2016-04-27 04:54:45 +00:00
|
|
|
from allmydata.util import fileutil, iputil
|
|
|
|
from allmydata.util.assertutil import _assert
|
2010-07-22 00:14:18 +00:00
|
|
|
from allmydata.util.fileutil import abspath_expanduser_unicode
|
2011-08-03 16:38:48 +00:00
|
|
|
from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
|
2015-12-21 21:59:15 +00:00
|
|
|
from allmydata.util import configutil
|
2007-12-13 03:31:01 +00:00
|
|
|
|
2007-12-21 21:42:38 +00:00
|
|
|
# Add our application versions to the data that Foolscap's LogPublisher
# reports.
for pkgname, pkgversion in get_package_versions().iteritems():
    app_versions.add_version(pkgname, str(pkgversion))
|
2007-05-21 20:42:51 +00:00
|
|
|
|
2007-05-22 21:01:40 +00:00
|
|
|
# group 1 will be addr (dotted quad string), group 3 if any will be portnum (string)
# BUGFIX: the octet pattern was previously [1-9][0-9]*, which rejected any
# address containing a zero octet (e.g. "10.0.0.1" or "192.168.0.1" would
# not match at all). Octets may now be any run of decimal digits; the port
# still may not start with 0. Written as a raw string so the escaped dots
# are unambiguous.
ADDR_RE=re.compile(r"^([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)(:([1-9][0-9]*))?$")
|
|
|
|
|
2007-10-12 00:30:07 +00:00
|
|
|
|
2007-10-15 03:43:11 +00:00
|
|
|
def formatTimeTahoeStyle(self, when):
    """Render 'when' (seconds since the epoch) as a UTC timestamp string.

    We want UTC timestamps that look like:
     2007-10-12 00:26:28.566Z [Client] rnp752lz: 'client running'
    i.e. always with exactly three digits of fractional seconds.
    (Installed as the formatTime method of twistd's FileLogObserver.)
    """
    stamp = datetime.datetime.utcfromtimestamp(when)
    base = stamp.isoformat(" ")
    if stamp.microsecond:
        # isoformat() emits six microsecond digits: trim to milliseconds
        return base[:-3] + "Z"
    # no microseconds: isoformat() omits them entirely, so append ".000"
    return base + ".000Z"
|
2007-10-12 00:30:07 +00:00
|
|
|
|
2007-12-17 23:39:54 +00:00
|
|
|
PRIV_README="""
|
|
|
|
This directory contains files which contain private data for the Tahoe node,
|
|
|
|
such as private keys. On Unix-like systems, the permissions on this directory
|
|
|
|
are set to disallow users other than its owner from reading the contents of
|
2010-11-28 17:34:44 +00:00
|
|
|
the files. See the 'configuration.rst' documentation file for details."""
|
2007-12-17 23:39:54 +00:00
|
|
|
|
2008-09-30 23:21:49 +00:00
|
|
|
class _None: # used as a marker in get_config()
|
|
|
|
pass
|
|
|
|
|
|
|
|
class MissingConfigEntry(Exception):
    """ A required config entry was not found.

    Raised by get_config(), get_private_config() and
    get_or_create_private_config() when no default was supplied and the
    entry (or private config file) is absent. """
|
|
|
|
|
|
|
|
class OldConfigError(Exception):
    """ An obsolete config file was found. See
    docs/historical/configuration.rst. """
    def __str__(self):
        # self.args[0] is the collection of offending filenames
        quoted = "\n".join([quote_output(fname) for fname in self.args[0]])
        return ("Found pre-Tahoe-LAFS-v1.3 configuration file(s):\n"
                "%s\n"
                "See docs/historical/configuration.rst."
                % quoted)
|
|
|
|
|
2011-11-20 23:24:26 +00:00
|
|
|
class OldConfigOptionError(Exception):
    """An obsolete configuration option was encountered."""
|
|
|
|
|
2014-05-05 21:55:50 +00:00
|
|
|
class UnescapedHashError(Exception):
    """A '.furl' config entry contained a '#' that was not escaped.

    Constructed with (section, option, value)."""
    def __str__(self):
        section, option, value = self.args
        return ("The configuration entry %s contained an unescaped '#' character."
                % quote_output("[%s]%s = %s" % (section, option, value)))
|
2014-05-05 21:55:50 +00:00
|
|
|
|
2008-09-30 23:21:49 +00:00
|
|
|
|
2006-12-03 01:27:18 +00:00
|
|
|
class Node(service.MultiService):
    # this implements common functionality of both Client nodes and Introducer
    # nodes.
    NODETYPE = "unknown NODETYPE"  # overridden by subclasses; used in log messages
    PORTNUMFILE = None  # set by subclasses; file (relative to basedir) that
                        # remembers the allocated tub port (see get_tub_port)
    CERTFILE = "node.pem"  # the main Tub's certificate, under BASEDIR/private/
    GENERATED_FILES = []  # legacy filenames this node generates itself, and
                          # which are therefore exempt from the
                          # error_about_old_config_files() check
|
2006-12-03 01:27:18 +00:00
|
|
|
|
2010-07-22 00:14:18 +00:00
|
|
|
def __init__(self, basedir=u"."):
|
2006-12-03 01:27:18 +00:00
|
|
|
service.MultiService.__init__(self)
|
2010-07-22 00:14:18 +00:00
|
|
|
self.basedir = abspath_expanduser_unicode(unicode(basedir))
|
2008-09-30 23:21:49 +00:00
|
|
|
self._portnumfile = os.path.join(self.basedir, self.PORTNUMFILE)
|
2007-12-17 23:39:54 +00:00
|
|
|
fileutil.make_dirs(os.path.join(self.basedir, "private"), 0700)
|
|
|
|
open(os.path.join(self.basedir, "private", "README"), "w").write(PRIV_README)
|
2008-09-30 23:21:49 +00:00
|
|
|
|
2011-08-03 01:32:12 +00:00
|
|
|
# creates self.config
|
2008-09-30 23:21:49 +00:00
|
|
|
self.read_config()
|
|
|
|
nickname_utf8 = self.get_config("node", "nickname", "<unspecified>")
|
|
|
|
self.nickname = nickname_utf8.decode("utf-8")
|
2009-06-23 02:10:47 +00:00
|
|
|
assert type(self.nickname) is unicode
|
2008-09-30 23:21:49 +00:00
|
|
|
|
2009-01-15 03:00:15 +00:00
|
|
|
self.init_tempdir()
|
2008-09-30 23:21:49 +00:00
|
|
|
self.create_tub()
|
2016-06-30 13:04:23 +00:00
|
|
|
self.create_control_tub()
|
2016-07-07 02:47:00 +00:00
|
|
|
self.create_log_tub()
|
2008-09-30 23:21:49 +00:00
|
|
|
self.logSource="Node"
|
|
|
|
|
|
|
|
self.setup_logging()
|
|
|
|
self.log("Node constructed. " + get_package_versions_string())
|
|
|
|
iputil.increase_rlimits()
|
|
|
|
|
2009-01-15 03:00:15 +00:00
|
|
|
def init_tempdir(self):
    """Point the process-wide tempfile default at a node-local directory.

    This should cause twisted.web.http (which uses tempfile.TemporaryFile)
    to put large request bodies in the given directory. Without this, the
    default temp dir is usually /tmp/, which is frequently too small.
    """
    configured = self.get_config("node", "tempdir", "tmp").decode('utf-8')
    tmp_path = abspath_expanduser_unicode(configured, base=self.basedir)
    if not os.path.exists(tmp_path):
        fileutil.make_dirs(tmp_path)
    tempfile.tempdir = tmp_path
    # sanity-check that tempfile will really allocate inside our directory
    probe = tempfile.mktemp()
    _assert(os.path.dirname(probe) == tmp_path, probe, tmp_path)
|
|
|
|
|
2014-05-05 21:55:50 +00:00
|
|
|
@staticmethod
def _contains_unescaped_hash(item):
    """Return True if 'item' contains a '#' not preceded by a backslash.

    A backslash escapes the single character that follows it, so '\\#'
    does not count as an unescaped hash.
    """
    characters = iter(item)
    for c in characters:
        if c == '\\':
            # BUGFIX: consume the escaped character via next() with a
            # default, so a string ending in a lone backslash no longer
            # raises StopIteration out of this function (the old
            # characters.next() call did).
            next(characters, None)
        elif c == '#':
            return True
    return False
|
|
|
|
|
2008-09-30 23:21:49 +00:00
|
|
|
def get_config(self, section, option, default=_None, boolean=False):
    """Fetch [section]option from tahoe.cfg.

    Returns 'default' when the section or option is absent; raises
    MissingConfigEntry if no default was supplied. With boolean=True
    the value is parsed by ConfigParser.getboolean(). Raises
    UnescapedHashError when a '.furl' option contains an unescaped '#'.
    """
    try:
        if boolean:
            return self.config.getboolean(section, option)

        value = self.config.get(section, option)
        if option.endswith(".furl") and self._contains_unescaped_hash(value):
            raise UnescapedHashError(section, option, value)

        return value
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        if default is not _None:
            return default
        fn = os.path.join(self.basedir, u"tahoe.cfg")
        raise MissingConfigEntry("%s is missing the [%s]%s entry"
                                 % (quote_output(fn), section, option))
|
|
|
|
|
|
|
|
def read_config(self):
    """Load tahoe.cfg into self.config, first rejecting pre-1.3 files."""
    self.error_about_old_config_files()
    # start with an empty parser so self.config is always usable, even
    # when tahoe.cfg does not exist yet
    self.config = ConfigParser.SafeConfigParser()

    cfg_path = os.path.join(self.basedir, "tahoe.cfg")
    try:
        self.config = configutil.get_config(cfg_path)
    except EnvironmentError:
        # a missing tahoe.cfg is acceptable; any other read error is not
        if os.path.exists(cfg_path):
            raise
|
2011-08-01 23:24:23 +00:00
|
|
|
|
2011-08-03 01:32:12 +00:00
|
|
|
def error_about_old_config_files(self):
    """ If any old configuration files are detected, raise OldConfigError. """
    legacy_names = [
        'nickname', 'webport', 'keepalive_timeout', 'log_gatherer.furl',
        'disconnect_timeout', 'advertised_ip_addresses', 'introducer.furl',
        'helper.furl', 'key_generator.furl', 'stats_gatherer.furl',
        'no_storage', 'readonly_storage', 'sizelimit',
        'debug_discard_storage', 'run_helper']
    found = set()
    for name in legacy_names:
        if name in self.GENERATED_FILES:
            # files the node generates itself are not stale user config
            continue
        path = os.path.join(self.basedir, name)
        if os.path.exists(path):
            found.add(path)
    if found:
        e = OldConfigError(found)
        twlog.msg(e)
        raise e
|
2008-09-30 23:21:49 +00:00
|
|
|
|
2016-04-27 00:56:08 +00:00
|
|
|
def _convert_tub_port(self, s):
    """Turn a bare port number into a 'tcp:PORT' endpoint descriptor;
    any other descriptor string is passed through unchanged."""
    is_bare_number = re.search(r'^\d+$', s)
    return ("tcp:%d" % int(s)) if is_bare_number else s
|
|
|
|
|
|
|
|
def get_tub_port(self):
    """Return the endpoint descriptor string the main Tub should listen on.

    For 'tub.port', tahoe.cfg overrides the individual file on disk, so
    self._portnumfile is only consulted when tahoe.cfg has no value. If
    neither exists, a fresh TCP port is allocated and persisted.
    """
    cfg_tubport = self.get_config("node", "tub.port", "")
    if cfg_tubport:
        return self._convert_tub_port(cfg_tubport)
    if os.path.exists(self._portnumfile):
        saved = fileutil.read(self._portnumfile).strip()
        return self._convert_tub_port(saved)
    # first run: allocate a port and remember it for next time
    new_port = "tcp:%d" % iputil.allocate_tcp_port()
    fileutil.write_atomically(self._portnumfile, new_port + "\n", mode="")
    return new_port
|
|
|
|
|
2016-04-27 01:21:36 +00:00
|
|
|
def get_tub_location(self, tubport):
    """Return the comma-separated location-hints string for the main Tub.

    'tubport' is the endpoint descriptor we are listening on (e.g.
    "tcp:12345"); its port number is reused for AUTO-detected hints.
    """
    location = self.get_config("node", "tub.location", "AUTO")
    # Replace the location "AUTO", if present, with the detected local
    # addresses. Don't probe for local addresses unless necessary.
    split_location = location.split(",")
    if "AUTO" in split_location:
        local_addresses = iputil.get_local_addresses_sync()
        # tubport must be like "tcp:12345" or "tcp:12345:morestuff"
        local_portnum = int(tubport.split(":")[1])
    new_locations = []
    for loc in split_location:
        if loc == "AUTO":
            # NOTE: local_addresses/local_portnum only exist when "AUTO"
            # was present in split_location, which is guaranteed here
            new_locations.extend(["tcp:%s:%d" % (ip, local_portnum)
                                  for ip in local_addresses])
        else:
            new_locations.append(loc)
    return ",".join(new_locations)
|
|
|
|
|
2016-06-30 13:04:23 +00:00
|
|
|
def create_tub(self):
    """Build the node's main Tub: certificate, options, nodeid, port and
    location, then attach it as a child service."""
    certfile = os.path.join(self.basedir, "private", self.CERTFILE)
    self.tub = Tub(certFile=certfile)
    self.tub_options = {
        "logLocalFailures": True,
        "logRemoteFailures": True,
        "expose-remote-exception-types": False,
        }
    # see #521 for a discussion of how to pick these timeout values.
    keepalive_timeout_s = self.get_config("node", "timeout.keepalive", "")
    if keepalive_timeout_s:
        self.tub_options["keepaliveTimeout"] = int(keepalive_timeout_s)
    disconnect_timeout_s = self.get_config("node", "timeout.disconnect", "")
    if disconnect_timeout_s:
        # N.B.: this is in seconds, so use "1800" to get 30min
        self.tub_options["disconnectTimeout"] = int(disconnect_timeout_s)
    for (name, value) in self.tub_options.items():
        self.tub.setOption(name, value)

    # the tubID (a base32 string) doubles as our node identity
    self.nodeid = b32decode(self.tub.tubID.upper()) # binary format
    self.write_config("my_nodeid", b32encode(self.nodeid).lower() + "\n")
    self.short_nodeid = b32encode(self.nodeid).lower()[:8] # ready for printing
    tubport = self.get_tub_port()
    if tubport in ("0", "tcp:0"):
        raise ValueError("tub.port cannot be 0: you must choose")
    self.tub.listenOn(tubport)

    # location must be set after listenOn, since it reuses the port number
    location = self.get_tub_location(tubport)
    self.tub.setLocation(location)
    self.log("Tub location set to %s" % (location,))

    # the Tub is now ready for tub.registerReference()
    self.tub.setServiceParent(self)
|
|
|
|
|
2016-07-07 02:47:00 +00:00
|
|
|
def create_control_tub(self):
    """Create the control-port Tub and attach it as a child service.

    The control port uses a localhost-only ephemeral Tub, with no
    control over the listening port or location.
    """
    self.control_tub = Tub()
    portnum = iputil.allocate_tcp_port()
    self.control_tub.listenOn("tcp:%d:interface=127.0.0.1" % portnum)
    location = "tcp:127.0.0.1:%d" % portnum
    self.control_tub.setLocation(location)
    self.log("Control Tub location set to %s" % (location,))
    self.control_tub.setServiceParent(self)
|
|
|
|
|
|
|
|
def create_log_tub(self):
    """Create the logport Tub and attach it as a child service.

    The logport uses a localhost-only ephemeral Tub, with no control
    over the listening port or location. This might change if we
    discover a compelling reason for it in the future (e.g. being able
    to use "flogtool tail" against a remote server), but for now I
    think we can live without it.
    """
    self.log_tub = Tub()
    portnum = iputil.allocate_tcp_port()
    endpoint = "tcp:%d:interface=127.0.0.1" % portnum
    hint = "tcp:127.0.0.1:%d" % portnum
    self.log_tub.listenOn(endpoint)
    self.log_tub.setLocation(hint)
    self.log("Log Tub location set to %s" % (hint,))
    self.log_tub.setServiceParent(self)
|
|
|
|
|
2008-09-20 18:38:53 +00:00
|
|
|
def get_app_versions(self):
    """Return a fresh dict copy of the versions registered with foolscap."""
    # TODO: merge this with allmydata.get_package_versions
    registered = app_versions.versions
    return dict(registered)
|
|
|
|
|
new introducer: signed extensible dictionary-based messages! refs #466
This introduces new client and server halves to the Introducer (renaming the
old one with a _V1 suffix). Both have fallbacks to accomodate talking to a
different version: the publishing client switches on whether the server's
.get_version() advertises V2 support, the server switches on which
subscription method was invoked by the subscribing client.
The V2 protocol sends a three-tuple of (serialized announcement dictionary,
signature, pubkey) for each announcement. The V2 server dispatches messages
to subscribers according to the service-name, and throws errors for invalid
signatures, but does not otherwise examine the messages. The V2 receiver's
subscription callback will receive a (serverid, ann_dict) pair. The
'serverid' will be equal to the pubkey if all of the following are true:
the originating client is V2, and was told a privkey to use
the announcement went through a V2 server
the signature is valid
If not, 'serverid' will be equal to the tubid portion of the announced FURL,
as was the case for V1 receivers.
Servers will create a keypair if one does not exist yet, stored in
private/server.privkey .
The signed announcement dictionary puts the server FURL in a key named
"anonymous-storage-FURL", which anticipates upcoming Accounting-related
changes in the server advertisements. It also provides a key named
"permutation-seed-base32" to tell clients what permutation seed to use. This
is computed at startup, using tubid if there are existing shares, otherwise
the pubkey, to retain share-order compatibility for existing servers.
2011-11-20 10:21:32 +00:00
|
|
|
def get_config_from_file(self, name, required=False):
    """Get the (string) contents of a config file, or None if the file
    did not exist. If required=True, raise an exception rather than
    returning None. Any leading or trailing whitespace will be stripped
    from the data."""
    path = os.path.join(self.basedir, name)
    try:
        return fileutil.read(path).strip()
    except EnvironmentError:
        if required:
            raise
        return None
|
|
|
|
|
2007-12-17 23:39:54 +00:00
|
|
|
def write_private_config(self, name, value):
    """Write the (string) contents of a private config file (which is a
    config file that resides within the subdirectory named 'private').

    Returns None. (The old docstring claimed the value was returned,
    but it never was.)
    """
    privname = os.path.join(self.basedir, "private", name)
    # BUGFIX: use a with-statement so the handle is closed promptly; the
    # previous open(...).write(...) leaked it until garbage collection.
    with open(privname, "w") as f:
        f.write(value)
|
2007-12-17 23:39:54 +00:00
|
|
|
|
2012-06-11 00:46:38 +00:00
|
|
|
def get_private_config(self, name, default=_None):
    """Read the (string) contents of a private config file (which is a
    config file that resides within the subdirectory named 'private'),
    and return it. Return a default, or raise an error if one was not
    given.
    """
    privname = os.path.join(self.basedir, "private", name)
    try:
        return fileutil.read(privname).strip()
    except EnvironmentError:
        if os.path.exists(privname):
            # the file exists but could not be read: a real error
            raise
        if default is not _None:
            return default
        raise MissingConfigEntry("The required configuration file %s is missing."
                                 % (quote_output(privname),))
|
|
|
|
|
2011-11-20 23:24:26 +00:00
|
|
|
def get_or_create_private_config(self, name, default=_None):
    """Try to get the (string) contents of a private config file (which
    is a config file that resides within the subdirectory named
    'private'), and return it. Any leading or trailing whitespace will be
    stripped from the data.

    If the file does not exist, and default is not given, report an error.
    If the file does not exist and a default is specified, try to create
    it using that default, and then return the value that was written.
    If 'default' is a string, use it as a default value. If not, treat it
    as a zero-argument callable that is expected to return a string.
    """
    privname = os.path.join(self.basedir, "private", name)
    try:
        value = fileutil.read(privname)
    except EnvironmentError:
        if os.path.exists(privname):
            # unreadable, not merely absent: propagate
            raise
        if default is _None:
            raise MissingConfigEntry("The required configuration file %s is missing."
                                     % (quote_output(privname),))
        value = default if isinstance(default, basestring) else default()
        fileutil.write(privname, value)
    return value.strip()
|
2007-08-28 01:58:39 +00:00
|
|
|
|
|
|
|
def write_config(self, name, value, mode="w"):
    """Write a string to a config file.

    NOTE: write failures are logged and swallowed, so callers cannot
    detect them; this is best-effort persistence.
    """
    fn = os.path.join(self.basedir, name)
    try:
        fileutil.write(fn, value, mode)
    except EnvironmentError, e:
        self.log("Unable to write config file '%s'" % fn)
        self.log(e)
|
|
|
|
|
2007-05-24 00:54:48 +00:00
|
|
|
def startService(self):
    """Start this node and all of its child services."""
    # Note: this class can be started and stopped at most once.
    self.log("Node.startService")
    # Record the process id in the twisted log, after startService()
    # (__init__ is called before fork(), but startService is called
    # after). Note that Foolscap logs handle pid-logging by itself, no
    # need to send a pid to the foolscap log here.
    twlog.msg("My pid: %s" % os.getpid())
    try:
        os.chmod("twistd.pid", 0644)
    except EnvironmentError:
        # best-effort: twistd.pid may not exist (e.g. when not daemonized)
        pass

    service.MultiService.startService(self)
    self.log("%s running" % self.NODETYPE)
|
2008-03-06 20:53:21 +00:00
|
|
|
|
2007-05-23 22:08:03 +00:00
|
|
|
def stopService(self):
    """Stop all child services; return the Deferred from MultiService."""
    self.log("Node.stopService")
    d = service.MultiService.stopService(self)
    return d
|
2007-05-31 20:44:22 +00:00
|
|
|
|
2007-03-08 22:10:36 +00:00
|
|
|
def shutdown(self):
    """Shut down the node. Returns a Deferred that fires (with None) when
    it finally stops kicking."""
    self.log("Node.shutdown")
    # delegates to stopService(), which stops every child service
    return self.stopService()
|
|
|
|
|
2007-10-12 00:30:07 +00:00
|
|
|
def setup_logging(self):
    """Hook up logging: install our timestamp format on twistd's log
    observer, configure the logport furl-file and optional log-gatherer,
    and set foolscap's incident directory."""
    # we replace the formatTime() method of the log observer that
    # twistd set up for us, with a method that uses our preferred
    # timestamp format.
    for o in twlog.theLogPublisher.observers:
        # o might be a FileLogObserver's .emit method
        if type(o) is type(self.setup_logging): # bound method
            ob = o.im_self
            if isinstance(ob, twlog.FileLogObserver):
                # rebind formatTimeTahoeStyle as a method of this observer
                newmeth = types.UnboundMethodType(formatTimeTahoeStyle, ob, ob.__class__)
                ob.formatTime = newmeth
    # TODO: twisted >2.5.0 offers maxRotatedFiles=50

    lgfurl_file = os.path.join(self.basedir, "private", "logport.furl").encode(get_filesystem_encoding())
    if os.path.exists(lgfurl_file):
        # NOTE(review): the old file is removed before the log Tub
        # rewrites it — presumably so a stale furl is never served
        os.remove(lgfurl_file)
    self.log_tub.setOption("logport-furlfile", lgfurl_file)
    lgfurl = self.get_config("node", "log_gatherer.furl", "")
    if lgfurl:
        # this is in addition to the contents of log-gatherer-furlfile
        self.log_tub.setOption("log-gatherer-furl", lgfurl)
    self.log_tub.setOption("log-gatherer-furlfile",
                           os.path.join(self.basedir, "log_gatherer.furl"))

    incident_dir = os.path.join(self.basedir, "logs", "incidents")
    foolscap.logging.log.setLogDir(incident_dir.encode(get_filesystem_encoding()))
|
2007-12-13 03:31:01 +00:00
|
|
|
|
2008-01-15 04:16:58 +00:00
|
|
|
def log(self, *args, **kwargs):
    """Forward all arguments to allmydata.util.log.msg."""
    return log.msg(*args, **kwargs)
|
2008-01-15 04:16:58 +00:00
|
|
|
|
2006-12-03 01:27:18 +00:00
|
|
|
def add_service(self, s):
    """Attach 's' as a child of this MultiService and return it."""
    s.setServiceParent(self)
    return s
|