Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git, synced 2025-01-19 03:06:33 +00:00
pull 'basedir' entirely into _Config
Put all config-related methods into _Config; change code to ask config for paths instead of using basedir; add some better docstrings
This commit is contained in: parent e269d2427b, commit 35810a5692
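As a quick orientation before the diff: the pattern applied throughout is that components stop joining paths against self.basedir themselves and instead ask the config object for a path. Below is a minimal, hypothetical sketch of the two path helpers that _Config gains in this commit; ConfigSketch and "demo-node" are illustrative stand-ins, not part of the change itself.

import os

class ConfigSketch(object):
    """Hypothetical stand-in for the path helpers _Config gains in this commit."""
    def __init__(self, basedir):
        self._basedir = os.path.abspath(basedir)

    def get_config_path(self, *args):
        # absolute path inside the node's base directory
        return os.path.join(self._basedir, *args)

    def get_private_path(self, *args):
        # absolute path inside BASEDIR/private/
        return os.path.join(self._basedir, "private", *args)

config = ConfigSketch("demo-node")
print(config.get_config_path("access.blacklist"))  # absolute path ending in demo-node/access.blacklist
print(config.get_private_path("storage.furl"))     # absolute path ending in demo-node/private/storage.furl

Callers that used to write os.path.join(self.basedir, ...) now ask the config object instead, so only _Config needs to know where the base directory actually is.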
@@ -29,6 +29,7 @@ from allmydata.interfaces import IStatsProducer, SDMF_VERSION, MDMF_VERSION
from allmydata.nodemaker import NodeMaker
from allmydata.blacklist import Blacklist
from allmydata.node import OldConfigOptionError, _common_config_sections
from allmydata.node import read_config

KiB=1024

@@ -156,12 +157,10 @@ class Terminator(service.Service):

#@defer.inlineCallbacks
def create_client(basedir=u"."):
    from allmydata.node import read_config
    config = read_config(basedir, u"client.port", _valid_config_sections=_valid_config_sections)
    #defer.returnValue(
    return _Client(
        config,
        basedir=basedir
    )
    #)
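For callers of the factory function nothing changes. A hedged sketch of how a client is now built from an existing node directory (the path is illustrative):

from allmydata.client import create_client

# create_client() reads BASEDIR/tahoe.cfg via read_config() and hands the
# resulting _Config to _Client; _Client itself no longer takes a basedir.
client = create_client(basedir=u"/path/to/existing/tahoe/node")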
@@ -188,8 +187,8 @@ class _Client(node.Node, pollmixin.PollMixin):
                                   "max_segment_size": 128*KiB,
                                   }

    def __init__(self, config, basedir=u"."):
        node.Node.__init__(self, config, basedir=basedir)
    def __init__(self, config):
        node.Node.__init__(self, config)
        # All tub.registerReference must happen *after* we upcall, since
        # that's what does tub.setLocation()
        self._magic_folders = dict()

@@ -203,13 +202,13 @@ class _Client(node.Node, pollmixin.PollMixin):
        self.init_storage()
        self.init_control()
        self._key_generator = KeyGenerator()
        key_gen_furl = self.get_config("client", "key_generator.furl", None)
        key_gen_furl = config.get_config("client", "key_generator.furl", None)
        if key_gen_furl:
            log.msg("[client]key_generator.furl= is now ignored, see #2783")
        self.init_client()
        self.load_static_servers()
        self.helper = None
        if self.get_config("helper", "enabled", False, boolean=True):
        if config.get_config("helper", "enabled", False, boolean=True):
            if not self._tub_is_listening:
                raise ValueError("config error: helper is enabled, but tub "
                                 "is not listening ('tub.port=' is empty)")

@@ -221,8 +220,7 @@ class _Client(node.Node, pollmixin.PollMixin):
        # If the node sees an exit_trigger file, it will poll every second to see
        # whether the file still exists, and what its mtime is. If the file does not
        # exist or has not been modified for a given timeout, the node will exit.
        exit_trigger_file = os.path.join(self.basedir,
                                         self.EXIT_TRIGGER_FILE)
        exit_trigger_file = self.config.get_config_path(self.EXIT_TRIGGER_FILE)
        if os.path.exists(exit_trigger_file):
            age = time.time() - os.stat(exit_trigger_file)[stat.ST_MTIME]
            self.log("%s file noticed (%ds old), starting timer" % (self.EXIT_TRIGGER_FILE, age))

@@ -231,17 +229,17 @@ class _Client(node.Node, pollmixin.PollMixin):

        # this needs to happen last, so it can use getServiceNamed() to
        # acquire references to StorageServer and other web-statusable things
        webport = self.get_config("node", "web.port", None)
        webport = self.config.get_config("node", "web.port", None)
        if webport:
            self.init_web(webport) # strports string

    def _sequencer(self):
        seqnum_s = self.get_config_from_file("announcement-seqnum")
        seqnum_s = self.config.get_config_from_file("announcement-seqnum")
        if not seqnum_s:
            seqnum_s = "0"
        seqnum = int(seqnum_s.strip())
        seqnum += 1 # increment
        self.write_config("announcement-seqnum", "%d\n" % seqnum)
        self.config.write_config_file("announcement-seqnum", "%d\n" % seqnum)
        nonce = _make_secret().strip()
        return seqnum, nonce

@@ -249,7 +247,7 @@ class _Client(node.Node, pollmixin.PollMixin):
        self.introducer_clients = []
        self.introducer_furls = []

        introducers_yaml_filename = os.path.join(self.basedir, "private", "introducers.yaml")
        introducers_yaml_filename = self.config.get_private_path("introducers.yaml")
        introducers_filepath = FilePath(introducers_yaml_filename)

        try:
@@ -265,7 +263,7 @@ class _Client(node.Node, pollmixin.PollMixin):
            raise ValueError("'default' introducer furl cannot be specified in introducers.yaml; please fix impossible configuration.")

        # read furl from tahoe.cfg
        tahoe_cfg_introducer_furl = self.get_config("client", "introducer.furl", None)
        tahoe_cfg_introducer_furl = self.config.get_config("client", "introducer.furl", None)
        if tahoe_cfg_introducer_furl == "None":
            raise ValueError("tahoe.cfg has invalid 'introducer.furl = None':"
                             " to disable it, use 'introducer.furl ='"
@@ -274,19 +272,19 @@ class _Client(node.Node, pollmixin.PollMixin):
            introducers[u'default'] = {'furl':tahoe_cfg_introducer_furl}

        for petname, introducer in introducers.items():
            introducer_cache_filepath = FilePath(os.path.join(self.basedir, "private", "introducer_{}_cache.yaml".format(petname)))
            introducer_cache_filepath = FilePath(self.config.get_private_path("introducer_{}_cache.yaml".format(petname)))
            ic = IntroducerClient(self.tub, introducer['furl'].encode("ascii"),
                                  self.nickname,
                                  str(allmydata.__full_version__),
                                  str(self.OLDEST_SUPPORTED_VERSION),
                                  self.get_app_versions(), self._sequencer,
                                  self.config.get_app_versions(), self._sequencer,
                                  introducer_cache_filepath)
            self.introducer_clients.append(ic)
            self.introducer_furls.append(introducer['furl'])
            ic.setServiceParent(self)

    def init_stats_provider(self):
        gatherer_furl = self.get_config("client", "stats_gatherer.furl", None)
        gatherer_furl = self.config.get_config("client", "stats_gatherer.furl", None)
        self.stats_provider = StatsProvider(self, gatherer_furl)
        self.add_service(self.stats_provider)
        self.stats_provider.register_producer(self)
@@ -295,10 +293,10 @@ class _Client(node.Node, pollmixin.PollMixin):
        return { 'node.uptime': time.time() - self.started_timestamp }

    def init_secrets(self):
        lease_s = self.get_or_create_private_config("secret", _make_secret)
        lease_s = self.config.get_or_create_private_config("secret", _make_secret)
        lease_secret = base32.a2b(lease_s)
        convergence_s = self.get_or_create_private_config('convergence',
                                                          _make_secret)
        convergence_s = self.config.get_or_create_private_config('convergence',
                                                                 _make_secret)
        self.convergence = base32.a2b(convergence_s)
        self._secret_holder = SecretHolder(lease_secret, self.convergence)

@@ -308,9 +306,9 @@ class _Client(node.Node, pollmixin.PollMixin):
        def _make_key():
            sk_vs,vk_vs = keyutil.make_keypair()
            return sk_vs+"\n"
        sk_vs = self.get_or_create_private_config("node.privkey", _make_key)
        sk_vs = self.config.get_or_create_private_config("node.privkey", _make_key)
        sk,vk_vs = keyutil.parse_privkey(sk_vs.strip())
        self.write_config("node.pubkey", vk_vs+"\n")
        self.config.write_config_file("node.pubkey", vk_vs+"\n")
        self._node_key = sk

    def get_long_nodeid(self):
@@ -322,7 +320,7 @@ class _Client(node.Node, pollmixin.PollMixin):
        return idlib.nodeid_b2a(self.nodeid)

    def _init_permutation_seed(self, ss):
        seed = self.get_config_from_file("permutation-seed")
        seed = self.config.get_config_from_file("permutation-seed")
        if not seed:
            have_shares = ss.have_shares()
            if have_shares:
@@ -339,24 +337,24 @@ class _Client(node.Node, pollmixin.PollMixin):
                # pubkey-based serverid
                vk_bytes = self._node_key.get_verifying_key_bytes()
                seed = base32.b2a(vk_bytes)
            self.write_config("permutation-seed", seed+"\n")
            self.config.write_config_file("permutation-seed", seed+"\n")
        return seed.strip()

    def init_storage(self):
        # should we run a storage server (and publish it for others to use)?
        if not self.get_config("storage", "enabled", True, boolean=True):
        if not self.config.get_config("storage", "enabled", True, boolean=True):
            return
        if not self._tub_is_listening:
            raise ValueError("config error: storage is enabled, but tub "
                             "is not listening ('tub.port=' is empty)")
        readonly = self.get_config("storage", "readonly", False, boolean=True)
        readonly = self.config.get_config("storage", "readonly", False, boolean=True)

        config_storedir = self.get_config(
            "storage", "storage_dir", self.STOREDIR,
        ).decode('utf-8')
        storedir = os.path.join(self.basedir, config_storedir)
        storedir = self.config.get_config_path(config_storedir)

        data = self.get_config("storage", "reserved_space", None)
        data = self.config.get_config("storage", "reserved_space", None)
        try:
            reserved = parse_abbreviated_size(data)
        except ValueError:
@@ -365,28 +363,28 @@ class _Client(node.Node, pollmixin.PollMixin):
            raise
        if reserved is None:
            reserved = 0
        discard = self.get_config("storage", "debug_discard", False,
                                  boolean=True)
        discard = self.config.get_config("storage", "debug_discard", False,
                                         boolean=True)

        expire = self.get_config("storage", "expire.enabled", False, boolean=True)
        expire = self.config.get_config("storage", "expire.enabled", False, boolean=True)
        if expire:
            mode = self.get_config("storage", "expire.mode") # require a mode
            mode = self.config.get_config("storage", "expire.mode") # require a mode
        else:
            mode = self.get_config("storage", "expire.mode", "age")
            mode = self.config.get_config("storage", "expire.mode", "age")

        o_l_d = self.get_config("storage", "expire.override_lease_duration", None)
        o_l_d = self.config.get_config("storage", "expire.override_lease_duration", None)
        if o_l_d is not None:
            o_l_d = parse_duration(o_l_d)

        cutoff_date = None
        if mode == "cutoff-date":
            cutoff_date = self.get_config("storage", "expire.cutoff_date")
            cutoff_date = self.config.get_config("storage", "expire.cutoff_date")
            cutoff_date = parse_date(cutoff_date)

        sharetypes = []
        if self.get_config("storage", "expire.immutable", True, boolean=True):
        if self.config.get_config("storage", "expire.immutable", True, boolean=True):
            sharetypes.append("immutable")
        if self.get_config("storage", "expire.mutable", True, boolean=True):
        if self.config.get_config("storage", "expire.mutable", True, boolean=True):
            sharetypes.append("mutable")
        expiration_sharetypes = tuple(sharetypes)

@@ -402,7 +400,7 @@ class _Client(node.Node, pollmixin.PollMixin):
                           expiration_sharetypes=expiration_sharetypes)
        self.add_service(ss)

        furl_file = os.path.join(self.basedir, "private", "storage.furl").encode(get_filesystem_encoding())
        furl_file = self.config.get_private_path("storage.furl").encode(get_filesystem_encoding())
        furl = self.tub.registerReference(ss, furlFile=furl_file)
        ann = {"anonymous-storage-FURL": furl,
               "permutation-seed-base32": self._init_permutation_seed(ss),
@@ -411,14 +409,14 @@ class _Client(node.Node, pollmixin.PollMixin):
            ic.publish("storage", ann, self._node_key)

    def init_client(self):
        helper_furl = self.get_config("client", "helper.furl", None)
        helper_furl = self.config.get_config("client", "helper.furl", None)
        if helper_furl in ("None", ""):
            helper_furl = None

        DEP = self.encoding_params
        DEP["k"] = int(self.get_config("client", "shares.needed", DEP["k"]))
        DEP["n"] = int(self.get_config("client", "shares.total", DEP["n"]))
        DEP["happy"] = int(self.get_config("client", "shares.happy", DEP["happy"]))
        DEP["k"] = int(self.config.get_config("client", "shares.needed", DEP["k"]))
        DEP["n"] = int(self.config.get_config("client", "shares.total", DEP["n"]))
        DEP["happy"] = int(self.config.get_config("client", "shares.happy", DEP["happy"]))

        # for the CLI to authenticate to local JSON endpoints
        self._create_auth_token()
@@ -441,7 +439,7 @@ class _Client(node.Node, pollmixin.PollMixin):
        Currently only the URI '/magic' for magic-folder status; other
        endpoints are invited to include this as well, as appropriate.
        """
        return self.get_private_config('api_auth_token')
        return self.config.get_private_config('api_auth_token')

    def _create_auth_token(self):
        """
@@ -449,7 +447,7 @@ class _Client(node.Node, pollmixin.PollMixin):

        This is intentionally re-created every time the node starts.
        """
        self.write_private_config(
        self.config.write_private_config(
            'api_auth_token',
            urlsafe_b64encode(os.urandom(32)) + '\n',
        )
@@ -457,7 +455,7 @@ class _Client(node.Node, pollmixin.PollMixin):
    def init_client_storage_broker(self):
        # create a StorageFarmBroker object, for use by Uploader/Downloader
        # (and everybody else who wants to use storage servers)
        ps = self.get_config("client", "peers.preferred", "").split(",")
        ps = self.config.get_config("client", "peers.preferred", "").split(",")
        preferred_peers = tuple([p.strip() for p in ps if p != ""])
        sb = storage_client.StorageFarmBroker(permute_peers=True,
                                              tub_maker=self._create_tub,
@@ -476,7 +474,7 @@ class _Client(node.Node, pollmixin.PollMixin):
        Load the servers.yaml file if it exists, and provide the static
        server data to the StorageFarmBroker.
        """
        fn = os.path.join(self.basedir, "private", "servers.yaml")
        fn = self.config.get_private_path("servers.yaml")
        servers_filepath = FilePath(fn)
        try:
            with servers_filepath.open() as f:
@@ -489,11 +487,11 @@ class _Client(node.Node, pollmixin.PollMixin):
            pass

    def init_blacklist(self):
        fn = os.path.join(self.basedir, "access.blacklist")
        fn = self.config.get_config_path("access.blacklist")
        self.blacklist = Blacklist(fn)

    def init_nodemaker(self):
        default = self.get_config("client", "mutable.format", default="SDMF")
        default = self.config.get_config("client", "mutable.format", default="SDMF")
        if default.upper() == "MDMF":
            self.mutable_file_default = MDMF_VERSION
        else:
@@ -515,10 +513,10 @@ class _Client(node.Node, pollmixin.PollMixin):
        c = ControlServer()
        c.setServiceParent(self)
        control_url = self.control_tub.registerReference(c)
        self.write_private_config("control.furl", control_url + "\n")
        self.config.write_private_config("control.furl", control_url + "\n")

    def init_helper(self):
        self.helper = Helper(os.path.join(self.basedir, "helper"),
        self.helper = Helper(self.config.get_config_path("helper"),
                             self.storage_broker, self._secret_holder,
                             self.stats_provider, self.history)
        # TODO: this is confusing. BASEDIR/private/helper.furl is created by
@@ -526,8 +524,7 @@ class _Client(node.Node, pollmixin.PollMixin):
        # to use the helper. I like having the filename be the same, since
        # that makes 'cp' work smoothly, but the difference between config
        # inputs and generated outputs is hard to see.
        helper_furlfile = os.path.join(self.basedir,
                                       "private", "helper.furl").encode(get_filesystem_encoding())
        helper_furlfile = self.config.get_private_path("helper.furl").encode(get_filesystem_encoding())
        self.tub.registerReference(self.helper, furlFile=helper_furlfile)

    def set_default_mutable_keysize(self, keysize):
@@ -537,35 +534,35 @@ class _Client(node.Node, pollmixin.PollMixin):
        self.log("init_web(webport=%s)", args=(webport,))

        from allmydata.webish import WebishServer
        nodeurl_path = os.path.join(self.basedir, "node.url")
        staticdir_config = self.get_config("node", "web.static", "public_html").decode("utf-8")
        staticdir = abspath_expanduser_unicode(staticdir_config, base=self.basedir)
        nodeurl_path = self.config.get_config_path("node.url")
        staticdir_config = self.config.get_config("node", "web.static", "public_html").decode("utf-8")
        staticdir = self.config.get_config_path(staticdir_config)
        ws = WebishServer(self, webport, nodeurl_path, staticdir)
        self.add_service(ws)

    def init_ftp_server(self):
        if self.get_config("ftpd", "enabled", False, boolean=True):
        if self.config.get_config("ftpd", "enabled", False, boolean=True):
            accountfile = from_utf8_or_none(
                self.get_config("ftpd", "accounts.file", None))
                self.config.get_config("ftpd", "accounts.file", None))
            if accountfile:
                accountfile = abspath_expanduser_unicode(accountfile, base=self.basedir)
            accounturl = self.get_config("ftpd", "accounts.url", None)
            ftp_portstr = self.get_config("ftpd", "port", "8021")
                accountfile = self.config.get_config_path(accountfile)
            accounturl = self.config.get_config("ftpd", "accounts.url", None)
            ftp_portstr = self.config.get_config("ftpd", "port", "8021")

            from allmydata.frontends import ftpd
            s = ftpd.FTPServer(self, accountfile, accounturl, ftp_portstr)
            s.setServiceParent(self)

    def init_sftp_server(self):
        if self.get_config("sftpd", "enabled", False, boolean=True):
        if self.config.get_config("sftpd", "enabled", False, boolean=True):
            accountfile = from_utf8_or_none(
                self.get_config("sftpd", "accounts.file", None))
                self.config.get_config("sftpd", "accounts.file", None))
            if accountfile:
                accountfile = abspath_expanduser_unicode(accountfile, base=self.basedir)
            accounturl = self.get_config("sftpd", "accounts.url", None)
            sftp_portstr = self.get_config("sftpd", "port", "8022")
            pubkey_file = from_utf8_or_none(self.get_config("sftpd", "host_pubkey_file"))
            privkey_file = from_utf8_or_none(self.get_config("sftpd", "host_privkey_file"))
                accountfile = self.config.get_config_path(accountfile)
            accounturl = self.config.get_config("sftpd", "accounts.url", None)
            sftp_portstr = self.config.get_config("sftpd", "port", "8022")
            pubkey_file = from_utf8_or_none(self.config.get_config("sftpd", "host_pubkey_file"))
            privkey_file = from_utf8_or_none(self.config.get_config("sftpd", "host_privkey_file"))

            from allmydata.frontends import sftpd
            s = sftpd.SFTPServer(self, accountfile, accounturl,
@@ -574,15 +571,15 @@ class _Client(node.Node, pollmixin.PollMixin):

    def init_magic_folder(self):
        #print "init_magic_folder"
        if self.get_config("drop_upload", "enabled", False, boolean=True):
        if self.config.get_config("drop_upload", "enabled", False, boolean=True):
            raise OldConfigOptionError("The [drop_upload] section must be renamed to [magic_folder].\n"
                                       "See docs/frontends/magic-folder.rst for more information.")

        if self.get_config("magic_folder", "enabled", False, boolean=True):
        if self.config.get_config("magic_folder", "enabled", False, boolean=True):
            from allmydata.frontends import magic_folder

            try:
                magic_folders = magic_folder.load_magic_folders(self.basedir)
                magic_folders = magic_folder.load_magic_folders(self.config._basedir)
            except Exception as e:
                log.msg("Error loading magic-folder config: {}".format(e))
                raise
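The secrets handling above relies on _Config.get_or_create_private_config(), which accepts either a string default or a zero-argument callable. A small hedged sketch of that pattern, assuming a node directory whose private/ subdirectory already exists ("demo-node" is illustrative):

import os
from allmydata.node import config_from_string

basedir = "demo-node"                                     # illustrative path
if not os.path.isdir(os.path.join(basedir, "private")):
    os.makedirs(os.path.join(basedir, "private"))

config = config_from_string("", "portnumfile", basedir)   # new three-argument form
# The first call invokes the callable and writes private/secret; later calls reuse it.
secret = config.get_or_create_private_config("secret", lambda: "s3kr1t\n")
assert secret == config.get_private_config("secret")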
@@ -150,7 +150,7 @@ class SpeedTest:
        self.size = size
        self.mutable_mode = mutable
        self.uris = {}
        self.basedir = os.path.join(self.parent.basedir, "_speed_test_data")
        self.basedir = self.parent.config.get_config_path("_speed_test_data")

    def run(self):
        self.create_data()
@@ -6,7 +6,6 @@ from foolscap.api import Referenceable
import allmydata
from allmydata import node
from allmydata.util import log, rrefutil
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.introducer.interfaces import \
     RIIntroducerPublisherAndSubscriberService_v2
from allmydata.introducer.common import unsign_from_foolscap, \
@@ -27,7 +26,6 @@ def create_introducer(basedir=u"."):
    #defer.returnValue(
    return _IntroducerNode(
        config,
        basedir=basedir
    )
    #)

@@ -35,8 +33,8 @@ def create_introducer(basedir=u"."):
class _IntroducerNode(node.Node):
    NODETYPE = "introducer"

    def __init__(self, config, basedir=u"."):
        node.Node.__init__(self, config, basedir=basedir)
    def __init__(self, config):
        node.Node.__init__(self, config)
        self.init_introducer()
        webport = self.get_config("node", "web.port", None)
        if webport:
@@ -46,11 +44,11 @@ class _IntroducerNode(node.Node):
        if not self._tub_is_listening:
            raise ValueError("config error: we are Introducer, but tub "
                             "is not listening ('tub.port=' is empty)")
        introducerservice = IntroducerService(self.basedir)
        introducerservice = IntroducerService()
        self.add_service(introducerservice)

        old_public_fn = os.path.join(self.basedir, u"introducer.furl")
        private_fn = os.path.join(self.basedir, u"private", u"introducer.furl")
        old_public_fn = self.config.get_config_path(u"introducer.furl")
        private_fn = self.config.get_private_path(u"introducer.furl")

        if os.path.exists(old_public_fn):
            if os.path.exists(private_fn):
@@ -73,9 +71,9 @@ class _IntroducerNode(node.Node):
        self.log("init_web(webport=%s)", args=(webport,), umid="2bUygA")

        from allmydata.webish import IntroducerWebishServer
        nodeurl_path = os.path.join(self.basedir, u"node.url")
        nodeurl_path = self.config.get_config_path(u"node.url")
        config_staticdir = self.get_config("node", "web.static", "public_html").decode('utf-8')
        staticdir = abspath_expanduser_unicode(config_staticdir, base=self.basedir)
        staticdir = self.config.get_config_path(config_staticdir)
        ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir)
        self.add_service(ws)

@@ -89,7 +87,7 @@ class IntroducerService(service.MultiService, Referenceable):
                 "application-version": str(allmydata.__full_version__),
                 }

    def __init__(self, basedir="."):
    def __init__(self):
        service.MultiService.__init__(self)
        self.introducer_url = None
        # 'index' is (service_name, key_s, tubid), where key_s or tubid is
@@ -146,14 +146,19 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config_sections
    except EnvironmentError:
        if os.path.exists(config_fname):
            raise

    configutil.validate_config(config_fname, parser, _valid_config_sections())
    return _Config(parser, portnumfile, config_fname)

    # make sure we have a private configuration area
    fileutil.make_dirs(os.path.join(basedir, "private"), 0o700)
    return _Config(parser, portnumfile, basedir, config_fname)


def config_from_string(config_str, portnumfile):
def config_from_string(config_str, portnumfile, basedir):
    # load configuration from in-memory string
    parser = ConfigParser.SafeConfigParser()
    parser.readfp(BytesIO(config_str))
    return _Config(parser, portnumfile, '<in-memory>')
    return _Config(parser, portnumfile, basedir, '<in-memory>')


def _error_about_old_config_files(basedir, generated_files):
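For reference, a hedged example of the new read_config() behaviour shown above: it now creates the private/ area itself and carries the basedir inside the returned _Config. The directory name and the minimal tahoe.cfg are illustrative, and the call mirrors the test usage later in this diff.

import os
from allmydata.node import read_config

basedir = "demo-node"                                 # illustrative
if not os.path.isdir(basedir):
    os.makedirs(basedir)
with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
    f.write("[node]\nnickname = demo\n")

config = read_config(basedir, "client.port")
print(config.nickname)                                    # "demo"
print(os.path.isdir(os.path.join(basedir, "private")))    # True -- created by read_config()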
@@ -189,11 +194,11 @@ class _Config(object):
    as a helper instead.
    """

    def __init__(self, configparser, portnum_fname, config_fname):
    def __init__(self, configparser, portnum_fname, basedir, config_fname):
        # XXX I think this portnumfile thing is just legacy?
        self.portnum_fname = portnum_fname
        self._config_fname = config_fname

        self._basedir = abspath_expanduser_unicode(unicode(basedir))
        self._config_fname = config_fname # the actual fname "configparser" came from
        self.config = configparser

        nickname_utf8 = self.get_config("node", "nickname", "<unspecified>")
@@ -211,6 +216,22 @@ class _Config(object):
            if os.path.exists(self.config_fname):
                raise

    def get_app_versions(self):
        # TODO: merge this with allmydata.get_package_versions
        return dict(app_versions.versions)

    def write_config_file(self, name, value, mode="w"):
        """
        writes the given 'value' into a file called 'name' in the config
        directory
        """
        fn = os.path.join(self._basedir, name)
        try:
            fileutil.write(fn, value, mode)
        except EnvironmentError as e:
            log.msg("Unable to write config file '{}'".format(fn))
            log.err(e)

    def get_config(self, section, option, default=_None, boolean=False):
        try:
            if boolean:
@@ -232,6 +253,87 @@ class _Config(object):
            )
            return default

    def get_config_from_file(self, name, required=False):
        """Get the (string) contents of a config file, or None if the file
        did not exist. If required=True, raise an exception rather than
        returning None. Any leading or trailing whitespace will be stripped
        from the data."""
        fn = os.path.join(self._basedir, name)
        try:
            return fileutil.read(fn).strip()
        except EnvironmentError:
            if not required:
                return None
            raise

    def get_or_create_private_config(self, name, default=_None):
        """Try to get the (string) contents of a private config file (which
        is a config file that resides within the subdirectory named
        'private'), and return it. Any leading or trailing whitespace will be
        stripped from the data.

        If the file does not exist, and default is not given, report an error.
        If the file does not exist and a default is specified, try to create
        it using that default, and then return the value that was written.
        If 'default' is a string, use it as a default value. If not, treat it
        as a zero-argument callable that is expected to return a string.
        """
        privname = os.path.join(self._basedir, "private", name)
        try:
            value = fileutil.read(privname)
        except EnvironmentError:
            if os.path.exists(privname):
                raise
            if default is _None:
                raise MissingConfigEntry("The required configuration file %s is missing."
                                         % (quote_output(privname),))
            if isinstance(default, basestring):
                value = default
            else:
                value = default()
            fileutil.write(privname, value)
        return value.strip()

    def write_private_config(self, name, value):
        """Write the (string) contents of a private config file (which is a
        config file that resides within the subdirectory named 'private'), and
        return it.
        """
        privname = os.path.join(self._basedir, "private", name)
        with open(privname, "w") as f:
            f.write(value)

    def get_private_config(self, name, default=_None):
        """Read the (string) contents of a private config file (which is a
        config file that resides within the subdirectory named 'private'),
        and return it. Return a default, or raise an error if one was not
        given.
        """
        privname = os.path.join(self._basedir, "private", name)
        try:
            return fileutil.read(privname).strip()
        except EnvironmentError:
            if os.path.exists(privname):
                raise
            if default is _None:
                raise MissingConfigEntry("The required configuration file %s is missing."
                                         % (quote_output(privname),))
            return default

    def get_private_path(self, *args):
        """
        returns an absolute path inside the 'private' directory with any
        extra args join()-ed
        """
        return os.path.join(self._basedir, "private", *args)

    def get_config_path(self, *args):
        """
        returns an absolute path inside the config directory with any
        extra args join()-ed
        """
        return os.path.join(self._basedir, *args)

    @staticmethod
    def _contains_unescaped_hash(item):
        characters = iter(item)
@@ -253,17 +355,15 @@ class Node(service.MultiService):
    CERTFILE = "node.pem"
    GENERATED_FILES = []

    def __init__(self, config, basedir=u"."):
    def __init__(self, config):
        """
        Initialize the node with the given configuration. It's base directory
        Initialize the node with the given configuration. Its base directory
        is the current directory by default.
        """
        service.MultiService.__init__(self)
        # ideally, this would only be in _Config (or otherwise abstracted)
        self.basedir = abspath_expanduser_unicode(unicode(basedir))
        # XXX don't write files in ctor!
        fileutil.make_dirs(os.path.join(self.basedir, "private"), 0700)
        with open(os.path.join(self.basedir, "private", "README"), "w") as f:
        fileutil.make_dirs(os.path.join(config._basedir, "private"), 0700)
        with open(os.path.join(config._basedir, "private", "README"), "w") as f:
            f.write(PRIV_README)

        self.config = config
@@ -292,7 +392,9 @@ class Node(service.MultiService):
        Initialize/create a directory for temporary files.
        """
        tempdir_config = self.config.get_config("node", "tempdir", "tmp").decode('utf-8')
        tempdir = abspath_expanduser_unicode(tempdir_config, base=self.basedir)
        tempdir = self.config.get_config_path(tempdir_config)
        tempdir0 = abspath_expanduser_unicode(tempdir_config, base=self.config._basedir)
        assert tempdir == tempdir0
        if not os.path.exists(tempdir):
            fileutil.make_dirs(tempdir)
        tempfile.tempdir = tempdir
@@ -308,12 +410,12 @@ class Node(service.MultiService):
        self._reveal_ip = self.config.get_config("node", "reveal-IP-address", True,
                                                 boolean=True)
    def create_i2p_provider(self):
        self._i2p_provider = i2p_provider.Provider(self.basedir, self.config, reactor)
        self._i2p_provider = i2p_provider.Provider(self.config, reactor)
        self._i2p_provider.check_dest_config()
        self._i2p_provider.setServiceParent(self)

    def create_tor_provider(self):
        self._tor_provider = tor_provider.Provider(self.basedir, self.config, reactor)
        self._tor_provider = tor_provider.Provider(self.config, reactor)
        self._tor_provider.check_onion_config()
        self._tor_provider.setServiceParent(self)

@@ -475,11 +577,11 @@ class Node(service.MultiService):
        return tubport, location

    def create_main_tub(self):
        certfile = os.path.join(self.basedir, "private", self.CERTFILE)
        certfile = self.config.get_private_path(self.CERTFILE)
        self.tub = self._create_tub(certFile=certfile)

        self.nodeid = b32decode(self.tub.tubID.upper()) # binary format
        self.write_config("my_nodeid", b32encode(self.nodeid).lower() + "\n")
        self.config.write_config_file("my_nodeid", b32encode(self.nodeid).lower() + "\n")
        self.short_nodeid = b32encode(self.nodeid).lower()[:8] # for printing
        cfg_tubport = self.config.get_config("node", "tub.port", None)
        cfg_location = self.config.get_config("node", "tub.location", None)
@@ -538,86 +640,6 @@ class Node(service.MultiService):
        self.log("Log Tub location set to %s" % (location,))
        self.log_tub.setServiceParent(self)

    def get_app_versions(self):
        # TODO: merge this with allmydata.get_package_versions
        return dict(app_versions.versions)

    def get_config_from_file(self, name, required=False):
        """Get the (string) contents of a config file, or None if the file
        did not exist. If required=True, raise an exception rather than
        returning None. Any leading or trailing whitespace will be stripped
        from the data."""
        fn = os.path.join(self.basedir, name)
        try:
            return fileutil.read(fn).strip()
        except EnvironmentError:
            if not required:
                return None
            raise

    def write_private_config(self, name, value):
        """Write the (string) contents of a private config file (which is a
        config file that resides within the subdirectory named 'private'), and
        return it.
        """
        privname = os.path.join(self.basedir, "private", name)
        with open(privname, "w") as f:
            f.write(value)

    def get_private_config(self, name, default=_None):
        """Read the (string) contents of a private config file (which is a
        config file that resides within the subdirectory named 'private'),
        and return it. Return a default, or raise an error if one was not
        given.
        """
        privname = os.path.join(self.basedir, "private", name)
        try:
            return fileutil.read(privname).strip()
        except EnvironmentError:
            if os.path.exists(privname):
                raise
            if default is _None:
                raise MissingConfigEntry("The required configuration file %s is missing."
                                         % (quote_output(privname),))
            return default

    def get_or_create_private_config(self, name, default=_None):
        """Try to get the (string) contents of a private config file (which
        is a config file that resides within the subdirectory named
        'private'), and return it. Any leading or trailing whitespace will be
        stripped from the data.

        If the file does not exist, and default is not given, report an error.
        If the file does not exist and a default is specified, try to create
        it using that default, and then return the value that was written.
        If 'default' is a string, use it as a default value. If not, treat it
        as a zero-argument callable that is expected to return a string.
        """
        privname = os.path.join(self.basedir, "private", name)
        try:
            value = fileutil.read(privname)
        except EnvironmentError:
            if os.path.exists(privname):
                raise
            if default is _None:
                raise MissingConfigEntry("The required configuration file %s is missing."
                                         % (quote_output(privname),))
            if isinstance(default, basestring):
                value = default
            else:
                value = default()
            fileutil.write(privname, value)
        return value.strip()

    def write_config(self, name, value, mode="w"):
        """Write a string to a config file."""
        fn = os.path.join(self.basedir, name)
        try:
            fileutil.write(fn, value, mode)
        except EnvironmentError, e:
            self.log("Unable to write config file '%s'" % fn)
            self.log(e)

    def startService(self):
        # Note: this class can be started and stopped at most once.
        self.log("Node.startService")
@@ -658,7 +680,7 @@ class Node(service.MultiService):
            ob.formatTime = newmeth
        # TODO: twisted >2.5.0 offers maxRotatedFiles=50

        lgfurl_file = os.path.join(self.basedir, "private", "logport.furl").encode(get_filesystem_encoding())
        lgfurl_file = self.config.get_private_path("logport.furl").encode(get_filesystem_encoding())
        if os.path.exists(lgfurl_file):
            os.remove(lgfurl_file)
        self.log_tub.setOption("logport-furlfile", lgfurl_file)
@@ -667,9 +689,9 @@ class Node(service.MultiService):
            # this is in addition to the contents of log-gatherer-furlfile
            self.log_tub.setOption("log-gatherer-furl", lgfurl)
        self.log_tub.setOption("log-gatherer-furlfile",
                               os.path.join(self.basedir, "log_gatherer.furl"))
                               self.config.get_config_path("log_gatherer.furl"))

        incident_dir = os.path.join(self.basedir, "logs", "incidents")
        incident_dir = self.config.get_config_path("logs", "incidents")
        foolscap.logging.log.setLogDir(incident_dir.encode(get_filesystem_encoding()))
        twlog.msg("Foolscap logging initialized")
        twlog.msg("Note to developers: twistd.log does not receive very much.")
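As a quick illustration of the two plain-file helpers that moved onto _Config above (write_config_file and get_config_from_file), here is a hedged round-trip mirroring how the client stores its announcement sequence number; the directory name is illustrative.

import os
from allmydata.node import config_from_string

basedir = "demo-node"                                      # illustrative
if not os.path.isdir(basedir):
    os.makedirs(basedir)
config = config_from_string("", "portnumfile", basedir)
config.write_config_file("announcement-seqnum", "1\n")
print(config.get_config_from_file("announcement-seqnum"))  # "1" (whitespace stripped)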
@@ -26,7 +26,8 @@ import treq
from allmydata.util.assertutil import _assert

from allmydata import uri as tahoe_uri
from allmydata.client import _Client, _valid_config_sections
from allmydata.client import _Client
from allmydata.client import _valid_config_sections as client_valid_config_sections
from allmydata.storage.server import StorageServer, storage_index_to_dir
from allmydata.util import fileutil, idlib, hashutil
from allmydata.util.hashutil import permute_server_hash
@@ -188,8 +189,8 @@ def NoNetworkClient(basedir):
    # XXX FIXME this is just to avoid massive search-replace for now;
    # should be create_nonetwork_client() or something...
    from allmydata.node import read_config
    config = read_config(basedir, u'client.port', _valid_config_sections=_valid_config_sections)
    return _NoNetworkClient(config, basedir=basedir)
    config = read_config(basedir, u'client.port', _valid_config_sections=client_valid_config_sections)
    return _NoNetworkClient(config)


class _NoNetworkClient(_Client):
@@ -401,10 +402,7 @@ class GridTestMixin:
                for c in self.g.clients]

    def get_clientdir(self, i=0):
        return self.g.clients[i].basedir

    def set_clientdir(self, basedir, i=0):
        self.g.clients[i].basedir = basedir
        return self.g.clients[i].config._basedir

    def get_client(self, i=0):
        return self.g.clients[i]
@@ -10,8 +10,7 @@ from ..util import connection_status
class FakeNode(Node):
    def __init__(self, config_str):
        from allmydata.node import config_from_string
        self.config = config_from_string(config_str, "fake.port")
        self.basedir = "BASEDIR"
        self.config = config_from_string(config_str, "fake.port", "no-basedir")
        self._reveal_ip = True
        self.services = []
        self.create_i2p_provider()
@@ -60,7 +59,7 @@ class Tor(unittest.TestCase):
                        return_value=h1) as f:
            n = FakeNode(config)
            h = n._make_tor_handler()
            private_dir = os.path.join(n.basedir, "private")
            private_dir = n.config.get_config_path("private")
            exp = mock.call(n._tor_provider._make_control_endpoint,
                            takes_status=True)
            self.assertEqual(f.mock_calls, [exp])
@@ -78,7 +77,8 @@ class Tor(unittest.TestCase):
            d = tp._make_control_endpoint(reactor,
                                          update_status=lambda status: None)
            cep = self.successResultOf(d)
            launch_tor.assert_called_with(reactor, executable, private_dir,
            launch_tor.assert_called_with(reactor, executable,
                                          os.path.abspath(private_dir),
                                          tp._txtorcon)
            cfs.assert_called_with(reactor, "ep_desc")
            self.assertIs(cep, tcep)
@@ -193,16 +193,16 @@ class FakeConfig(dict):

class Provider(unittest.TestCase):
    def test_build(self):
        i2p_provider.Provider("basedir", FakeConfig(), "reactor")
        i2p_provider.Provider(FakeConfig(), "reactor")

    def test_handler_disabled(self):
        p = i2p_provider.Provider("basedir", FakeConfig(enabled=False),
        p = i2p_provider.Provider(FakeConfig(enabled=False),
                                  "reactor")
        self.assertEqual(p.get_i2p_handler(), None)

    def test_handler_no_i2p(self):
        with mock_i2p(None):
            p = i2p_provider.Provider("basedir", FakeConfig(), "reactor")
            p = i2p_provider.Provider(FakeConfig(), "reactor")
        self.assertEqual(p.get_i2p_handler(), None)

    def test_handler_sam_endpoint(self):
@@ -213,8 +213,7 @@ class Provider(unittest.TestCase):
        reactor = object()

        with mock_i2p(i2p):
            p = i2p_provider.Provider("basedir",
                                      FakeConfig(**{"sam.port": "ep_desc"}),
            p = i2p_provider.Provider(FakeConfig(**{"sam.port": "ep_desc"}),
                                      reactor)
            with mock.patch("allmydata.util.i2p_provider.clientFromString",
                            return_value=ep) as cfs:
@@ -230,7 +229,7 @@ class Provider(unittest.TestCase):
        reactor = object()

        with mock_i2p(i2p):
            p = i2p_provider.Provider("basedir", FakeConfig(launch=True),
            p = i2p_provider.Provider(FakeConfig(launch=True),
                                      reactor)
            h = p.get_i2p_handler()
            self.assertIs(h, handler)
@@ -243,8 +242,7 @@ class Provider(unittest.TestCase):
        reactor = object()

        with mock_i2p(i2p):
            p = i2p_provider.Provider("basedir",
                                      FakeConfig(launch=True,
            p = i2p_provider.Provider(FakeConfig(launch=True,
                                                 **{"i2p.configdir": "configdir"}),
                                      reactor)
            h = p.get_i2p_handler()
@@ -258,8 +256,7 @@ class Provider(unittest.TestCase):
        reactor = object()

        with mock_i2p(i2p):
            p = i2p_provider.Provider("basedir",
                                      FakeConfig(launch=True,
            p = i2p_provider.Provider(FakeConfig(launch=True,
                                                 **{"i2p.configdir": "configdir",
                                                    "i2p.executable": "myi2p",
                                                    }),
@@ -275,8 +272,7 @@ class Provider(unittest.TestCase):
        reactor = object()

        with mock_i2p(i2p):
            p = i2p_provider.Provider("basedir",
                                      FakeConfig(**{"i2p.configdir": "configdir"}),
            p = i2p_provider.Provider(FakeConfig(**{"i2p.configdir": "configdir"}),
                                      reactor)
            h = p.get_i2p_handler()
            i2p.local_i2p.assert_called_with("configdir")
@@ -289,7 +285,7 @@ class Provider(unittest.TestCase):
        reactor = object()

        with mock_i2p(i2p):
            p = i2p_provider.Provider("basedir", FakeConfig(), reactor)
            p = i2p_provider.Provider(FakeConfig(), reactor)
            h = p.get_i2p_handler()
            self.assertIs(h, handler)
            i2p.default.assert_called_with(reactor, keyfile=None)
@@ -308,8 +304,7 @@ class ProviderListener(unittest.TestCase):

        privkeyfile = os.path.join("private", "i2p_dest.privkey")
        with mock_i2p(i2p):
            p = i2p_provider.Provider("basedir",
                                      FakeConfig(**{
            p = i2p_provider.Provider(FakeConfig(**{
                "i2p.configdir": "configdir",
                "sam.port": "good:port",
                "dest": "true",
@@ -326,37 +321,36 @@ class Provider_CheckI2PConfig(unittest.TestCase):
        # default config doesn't start an I2P service, so it should be
        # happy both with and without txi2p

        p = i2p_provider.Provider("basedir", FakeConfig(), "reactor")
        p = i2p_provider.Provider(FakeConfig(), "reactor")
        p.check_dest_config()

        with mock_txi2p(None):
            p = i2p_provider.Provider("basedir", FakeConfig(), "reactor")
            p = i2p_provider.Provider(FakeConfig(), "reactor")
            p.check_dest_config()

    def test_no_txi2p(self):
        with mock_txi2p(None):
            p = i2p_provider.Provider("basedir", FakeConfig(dest=True),
            p = i2p_provider.Provider(FakeConfig(dest=True),
                                      "reactor")
            e = self.assertRaises(ValueError, p.check_dest_config)
            self.assertEqual(str(e), "Cannot create I2P Destination without txi2p. "
                                     "Please 'pip install tahoe-lafs[i2p]' to fix.")

    def test_no_launch_no_control(self):
        p = i2p_provider.Provider("basedir", FakeConfig(dest=True), "reactor")
        p = i2p_provider.Provider(FakeConfig(dest=True), "reactor")
        e = self.assertRaises(ValueError, p.check_dest_config)
        self.assertEqual(str(e), "[i2p] dest = true, but we have neither "
                                 "sam.port= nor launch=true nor configdir=")

    def test_missing_keys(self):
        p = i2p_provider.Provider("basedir", FakeConfig(dest=True,
        p = i2p_provider.Provider(FakeConfig(dest=True,
                                             **{"sam.port": "x",
                                                }), "reactor")
        e = self.assertRaises(ValueError, p.check_dest_config)
        self.assertEqual(str(e), "[i2p] dest = true, "
                                 "but dest.port= is missing")

        p = i2p_provider.Provider("basedir",
                                  FakeConfig(dest=True,
        p = i2p_provider.Provider(FakeConfig(dest=True,
                                             **{"sam.port": "x",
                                                "dest.port": "y",
                                                }), "reactor")
@@ -365,8 +359,7 @@ class Provider_CheckI2PConfig(unittest.TestCase):
                                 "but dest.private_key_file= is missing")

    def test_launch_not_implemented(self):
        p = i2p_provider.Provider("basedir",
                                  FakeConfig(dest=True, launch=True,
        p = i2p_provider.Provider(FakeConfig(dest=True, launch=True,
                                             **{"dest.port": "x",
                                                "dest.private_key_file": "y",
                                                }), "reactor")
@@ -374,8 +367,7 @@ class Provider_CheckI2PConfig(unittest.TestCase):
        self.assertEqual(str(e), "[i2p] launch is under development.")

    def test_ok(self):
        p = i2p_provider.Provider("basedir",
                                  FakeConfig(dest=True,
        p = i2p_provider.Provider(FakeConfig(dest=True,
                                             **{"sam.port": "x",
                                                "dest.port": "y",
                                                "dest.private_key_file": "z",
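The same constructor change applies symmetrically to the Tor provider tests further below: providers are now built from (config, reactor) rather than (basedir, config, reactor). A minimal hedged sketch using a dict-backed fake config like the one in these tests:

from allmydata.util import i2p_provider

_None = object()

class FakeConfig(dict):
    # mirrors the FakeConfig helper used in the tests above
    def get_config(self, section, option, default=_None, boolean=False):
        value = self.get(option, default)
        if value is _None:
            raise KeyError(option)
        return value

provider = i2p_provider.Provider(FakeConfig(enabled=False), "reactor")
print(provider.get_i2p_handler())   # None, because [i2p] enabled = false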
@@ -16,7 +16,8 @@ import foolscap.logging.log
from twisted.application import service
from allmydata.node import Node, formatTimeTahoeStyle, MissingConfigEntry, read_config, config_from_string
from allmydata.introducer.server import create_introducer
from allmydata.client import create_client, _valid_config_sections
from allmydata.client import create_client
from allmydata.client import _valid_config_sections as client_valid_config_sections
from allmydata.util import fileutil, iputil
from allmydata.util.namespace import Namespace
from allmydata.util.configutil import UnknownConfigError
@@ -34,7 +35,7 @@ class TestNode(Node):
        config = read_config(
            basedir,
            'DEFAULT_PORTNUMFILE_BLANK',
            _valid_config_sections=_valid_config_sections,
            _valid_config_sections=client_valid_config_sections,
        )
        Node.__init__(self, config, basedir)

@@ -120,9 +121,8 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
        f.write(u"nickname = \u2621\n".encode('utf-8'))
        f.close()

        n = TestNode(basedir)
        n.setServiceParent(self.parent)
        self.failUnlessEqual(n.get_config("node", "nickname").decode('utf-8'),
        config = read_config(basedir, "")
        self.failUnlessEqual(config.get_config("node", "nickname").decode('utf-8'),
                             u"\u2621")

    def test_tahoe_cfg_hash_in_name(self):
@@ -133,8 +133,9 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
        f.write("[node]\n")
        f.write("nickname = %s\n" % (nickname,))
        f.close()
        n = TestNode(basedir)
        self.failUnless(n.nickname == nickname)

        config = read_config(basedir, "")
        self.failUnless(config.nickname == nickname)

    def test_private_config(self):
        basedir = "test_node/test_private_config"
@@ -144,26 +145,44 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
        f.write("secret")
        f.close()

        n = TestNode(basedir)
        self.failUnlessEqual(n.get_private_config("already"), "secret")
        self.failUnlessEqual(n.get_private_config("not", "default"), "default")
        self.failUnlessRaises(MissingConfigEntry, n.get_private_config, "not")
        value = n.get_or_create_private_config("new", "start")
        config = config_from_string("", "", basedir)

        self.failUnlessEqual(config.get_private_config("already"), "secret")
        self.failUnlessEqual(config.get_private_config("not", "default"), "default")
        self.failUnlessRaises(MissingConfigEntry, config.get_private_config, "not")
        value = config.get_or_create_private_config("new", "start")
        self.failUnlessEqual(value, "start")
        self.failUnlessEqual(n.get_private_config("new"), "start")
        self.failUnlessEqual(config.get_private_config("new"), "start")
        counter = []
        def make_newer():
            counter.append("called")
            return "newer"
        value = n.get_or_create_private_config("newer", make_newer)
        value = config.get_or_create_private_config("newer", make_newer)
        self.failUnlessEqual(len(counter), 1)
        self.failUnlessEqual(value, "newer")
        self.failUnlessEqual(n.get_private_config("newer"), "newer")
        self.failUnlessEqual(config.get_private_config("newer"), "newer")

        value = n.get_or_create_private_config("newer", make_newer)
        value = config.get_or_create_private_config("newer", make_newer)
        self.failUnlessEqual(len(counter), 1) # don't call unless necessary
        self.failUnlessEqual(value, "newer")

    def test_write_config_unwritable_file(self):
        basedir = "test_node/configdir"
        fileutil.make_dirs(basedir)
        config = config_from_string("", "", basedir)
        with open(os.path.join(basedir, "bad"), "w") as f:
            f.write("bad")
        os.chmod(os.path.join(basedir, "bad"), 0o000)

        config.write_config_file("bad", "some value")

        errs = self.flushLoggedErrors()
        self.assertEqual(1, len(errs))
        self.assertIn(
            "IOError",
            str(errs[0])
        )

    def test_timestamp(self):
        # this modified logger doesn't seem to get used during the tests,
        # probably because we don't modify the LogObserver that trial
@@ -177,8 +196,8 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
    def test_secrets_dir(self):
        basedir = "test_node/test_secrets_dir"
        fileutil.make_dirs(basedir)
        n = TestNode(basedir)
        self.failUnless(isinstance(n, TestNode))
        read_config(basedir, "")

        self.failUnless(os.path.exists(os.path.join(basedir, "private")))

    def test_secrets_dir_protected(self):
@@ -189,8 +208,8 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
            raise unittest.SkipTest("We don't know how to set permissions on Windows.")
        basedir = "test_node/test_secrets_dir_protected"
        fileutil.make_dirs(basedir)
        n = TestNode(basedir)
        self.failUnless(isinstance(n, TestNode))
        read_config(basedir, "")

        privdir = os.path.join(basedir, "private")
        st = os.stat(privdir)
        bits = stat.S_IMODE(st[stat.ST_MODE])
@@ -212,8 +231,8 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):

class EmptyNode(Node):
    def __init__(self):
        config = config_from_string("", "no portfile")
        Node.__init__(self, config, 'no basedir')
        config = config_from_string("", "no portfile", 'no basedir')
        Node.__init__(self, config)

EXPECTED = {
    # top-level key is tub.port category
@@ -389,7 +408,7 @@ class Listeners(unittest.TestCase):
        n.config = read_config(
            n.basedir,
            "client.port",
            _valid_config_sections=_valid_config_sections,
            _valid_config_sections=client_valid_config_sections,
        )
        n.check_privacy()
        n.services = []
@@ -419,7 +438,7 @@ class Listeners(unittest.TestCase):
        n.config = read_config(
            n.basedir,
            "client.port",
            _valid_config_sections=_valid_config_sections,
            _valid_config_sections=client_valid_config_sections,
        )
        n.check_privacy()
        n.services = []
@@ -2120,7 +2120,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
        # exercise the remote-control-the-client foolscap interfaces in
        # allmydata.control (mostly used for performance tests)
        c0 = self.clients[0]
        control_furl_file = os.path.join(c0.basedir, "private", "control.furl")
        control_furl_file = c0.config.get_private_path("control.furl")
        control_furl = open(control_furl_file, "r").read().strip()
        # it doesn't really matter which Tub we use to connect to the client,
        # so let's just use our IntroducerNode's
@ -261,6 +261,7 @@ class CreateOnion(unittest.TestCase):
|
||||
privkey = f.read()
|
||||
self.assertEqual(privkey, "privkey")
|
||||
|
||||
|
||||
_None = object()
|
||||
class FakeConfig(dict):
|
||||
def get_config(self, section, option, default=_None, boolean=False):
|
||||
@ -271,6 +272,10 @@ class FakeConfig(dict):
|
||||
raise KeyError
|
||||
return value
|
||||
|
||||
def get_config_path(self, *args):
|
||||
return os.path.join(self.get("basedir", "basedir"), *args)
|
||||
|
||||
|
||||
class EmptyContext(object):
|
||||
def __init__(self):
|
||||
pass
|
||||
@ -281,21 +286,21 @@ class EmptyContext(object):
|
||||
|
||||
class Provider(unittest.TestCase):
|
||||
def test_build(self):
|
||||
tor_provider.Provider("basedir", FakeConfig(), "reactor")
|
||||
tor_provider.Provider(FakeConfig(), "reactor")
|
||||
|
||||
def test_handler_disabled(self):
|
||||
p = tor_provider.Provider("basedir", FakeConfig(enabled=False),
|
||||
p = tor_provider.Provider(FakeConfig(enabled=False),
|
||||
"reactor")
|
||||
self.assertEqual(p.get_tor_handler(), None)
|
||||
|
||||
def test_handler_no_tor(self):
|
||||
with mock_tor(None):
|
||||
p = tor_provider.Provider("basedir", FakeConfig(), "reactor")
|
||||
p = tor_provider.Provider(FakeConfig(), "reactor")
|
||||
self.assertEqual(p.get_tor_handler(), None)
|
||||
|
||||
def test_handler_launch_no_txtorcon(self):
|
||||
with mock_txtorcon(None):
|
||||
p = tor_provider.Provider("basedir", FakeConfig(launch=True),
|
||||
p = tor_provider.Provider(FakeConfig(launch=True),
|
||||
"reactor")
|
||||
self.assertEqual(p.get_tor_handler(), None)
|
||||
|
||||
@ -309,7 +314,7 @@ class Provider(unittest.TestCase):
|
||||
tor.add_context = mock.Mock(return_value=EmptyContext())
|
||||
with mock_tor(tor):
|
||||
with mock_txtorcon(txtorcon):
|
||||
p = tor_provider.Provider("basedir", FakeConfig(launch=True),
|
||||
p = tor_provider.Provider(FakeConfig(launch=True),
|
||||
reactor)
|
||||
h = p.get_tor_handler()
|
||||
self.assertIs(h, handler)
|
||||
@ -355,8 +360,7 @@ class Provider(unittest.TestCase):
|
||||
reactor = object()
|
||||
|
||||
with mock_tor(tor):
|
||||
p = tor_provider.Provider("basedir",
|
||||
FakeConfig(**{"socks.port": "ep_desc"}),
|
||||
p = tor_provider.Provider(FakeConfig(**{"socks.port": "ep_desc"}),
|
||||
reactor)
|
||||
with mock.patch("allmydata.util.tor_provider.clientFromString", cfs):
|
||||
h = p.get_tor_handler()
|
||||
@ -373,8 +377,7 @@ class Provider(unittest.TestCase):
|
||||
reactor = object()
|
||||
|
||||
with mock_tor(tor):
|
||||
p = tor_provider.Provider("basedir",
|
||||
FakeConfig(**{"control.port": "ep_desc"}),
|
||||
p = tor_provider.Provider(FakeConfig(**{"control.port": "ep_desc"}),
|
||||
reactor)
|
||||
with mock.patch("allmydata.util.tor_provider.clientFromString", cfs):
|
||||
h = p.get_tor_handler()
|
||||
@ -388,7 +391,7 @@ class Provider(unittest.TestCase):
|
||||
tor.default_socks = mock.Mock(return_value=handler)
|
||||
|
||||
with mock_tor(tor):
|
||||
p = tor_provider.Provider("basedir", FakeConfig(), "reactor")
|
||||
p = tor_provider.Provider(FakeConfig(), "reactor")
|
||||
h = p.get_tor_handler()
|
||||
self.assertIs(h, handler)
|
||||
tor.default_socks.assert_called_with()
|
||||
@ -405,8 +408,7 @@ class ProviderListener(unittest.TestCase):
|
||||
reactor = object()
|
||||
|
||||
with mock_tor(tor):
|
||||
p = tor_provider.Provider("basedir",
|
||||
FakeConfig(**{"onion.local_port": "321"}),
|
||||
p = tor_provider.Provider(FakeConfig(**{"onion.local_port": "321"}),
|
||||
reactor)
|
||||
fake_ep = object()
|
||||
with mock.patch("allmydata.util.tor_provider.TCP4ServerEndpoint",
|
||||
@ -421,44 +423,42 @@ class Provider_CheckOnionConfig(unittest.TestCase):
# default config doesn't start an onion service, so it should be
# happy both with and without txtorcon

p = tor_provider.Provider("basedir", FakeConfig(), "reactor")
p = tor_provider.Provider(FakeConfig(), "reactor")
p.check_onion_config()

with mock_txtorcon(None):
p = tor_provider.Provider("basedir", FakeConfig(), "reactor")
p = tor_provider.Provider(FakeConfig(), "reactor")
p.check_onion_config()

def test_no_txtorcon(self):
with mock_txtorcon(None):
p = tor_provider.Provider("basedir", FakeConfig(onion=True),
p = tor_provider.Provider(FakeConfig(onion=True),
"reactor")
e = self.assertRaises(ValueError, p.check_onion_config)
self.assertEqual(str(e), "Cannot create onion without txtorcon. "
"Please 'pip install tahoe-lafs[tor]' to fix.")

def test_no_launch_no_control(self):
p = tor_provider.Provider("basedir", FakeConfig(onion=True), "reactor")
p = tor_provider.Provider(FakeConfig(onion=True), "reactor")
e = self.assertRaises(ValueError, p.check_onion_config)
self.assertEqual(str(e), "[tor] onion = true, but we have neither "
"launch=true nor control.port=")

def test_missing_keys(self):
p = tor_provider.Provider("basedir", FakeConfig(onion=True,
launch=True), "reactor")
p = tor_provider.Provider(FakeConfig(onion=True,
launch=True), "reactor")
e = self.assertRaises(ValueError, p.check_onion_config)
self.assertEqual(str(e), "[tor] onion = true, "
"but onion.local_port= is missing")

p = tor_provider.Provider("basedir",
FakeConfig(onion=True, launch=True,
p = tor_provider.Provider(FakeConfig(onion=True, launch=True,
**{"onion.local_port": "x",
}), "reactor")
e = self.assertRaises(ValueError, p.check_onion_config)
self.assertEqual(str(e), "[tor] onion = true, "
"but onion.external_port= is missing")

p = tor_provider.Provider("basedir",
FakeConfig(onion=True, launch=True,
p = tor_provider.Provider(FakeConfig(onion=True, launch=True,
**{"onion.local_port": "x",
"onion.external_port": "y",
}), "reactor")
@ -467,8 +467,7 @@ class Provider_CheckOnionConfig(unittest.TestCase):
"but onion.private_key_file= is missing")

def test_ok(self):
p = tor_provider.Provider("basedir",
FakeConfig(onion=True, launch=True,
p = tor_provider.Provider(FakeConfig(onion=True, launch=True,
**{"onion.local_port": "x",
"onion.external_port": "y",
"onion.private_key_file": "z",
@ -478,7 +477,7 @@ class Provider_CheckOnionConfig(unittest.TestCase):
class Provider_Service(unittest.TestCase):
def test_no_onion(self):
reactor = object()
p = tor_provider.Provider("basedir", FakeConfig(onion=False), reactor)
p = tor_provider.Provider(FakeConfig(onion=False), reactor)
with mock.patch("allmydata.util.tor_provider.Provider._start_onion") as s:
p.startService()
self.assertEqual(s.mock_calls, [])
@ -495,7 +494,7 @@ class Provider_Service(unittest.TestCase):
with open(fn, "w") as f:
f.write("private key")
reactor = object()
cfg = FakeConfig(onion=True, launch=True,
cfg = FakeConfig(basedir=basedir, onion=True, launch=True,
**{"onion.local_port": 123,
"onion.external_port": 456,
"onion.private_key_file": "keyfile",
@ -503,7 +502,7 @@ class Provider_Service(unittest.TestCase):

txtorcon = mock.Mock()
with mock_txtorcon(txtorcon):
p = tor_provider.Provider(basedir, cfg, reactor)
p = tor_provider.Provider(cfg, reactor)
tor_state = mock.Mock()
tor_state.protocol = object()
ehs = mock.Mock()
@ -535,7 +534,7 @@ class Provider_Service(unittest.TestCase):
with open(fn, "w") as f:
f.write("private key")
reactor = object()
cfg = FakeConfig(onion=True,
cfg = FakeConfig(basedir=basedir, onion=True,
**{"control.port": "ep_desc",
"onion.local_port": 123,
"onion.external_port": 456,
@ -544,7 +543,7 @@ class Provider_Service(unittest.TestCase):

txtorcon = mock.Mock()
with mock_txtorcon(txtorcon):
p = tor_provider.Provider(basedir, cfg, reactor)
p = tor_provider.Provider(cfg, reactor)
tor_state = mock.Mock()
tor_state.protocol = object()
txtorcon.build_tor_connection = mock.Mock(return_value=tor_state)
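The two Provider_Service hunks above are why FakeConfig now accepts basedir=: with the separate basedir argument gone, the provider can only find its on-disk private key through the config object. A hedged usage sketch, building on the illustrative FakeConfig sketched earlier:

import os

basedir = "/tmp/test-onion"   # stands in for the test's temporary directory
cfg = FakeConfig(basedir=basedir, onion=True, launch=True,
                 **{"onion.local_port": 123,
                    "onion.external_port": 456,
                    "onion.private_key_file": "keyfile"})

# the provider can now resolve the key file from the config alone:
keyfile = cfg.get_config_path(cfg.get_config("tor", "onion.private_key_file"))
assert keyfile == os.path.join(basedir, "keyfile")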
@ -1238,8 +1238,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.basedir = "web/Grid/blacklist"
self.set_up_grid(oneshare=True)
c0 = self.g.clients[0]
c0_basedir = c0.basedir
fn = os.path.join(c0_basedir, "access.blacklist")
fn = c0.config.get_config_path("access.blacklist")
self.uris = {}
DATA = "off-limits " * 50
@ -25,7 +25,7 @@ class IntroducerWeb(unittest.TestCase):
)
from allmydata.node import config_from_string
self.node = IntroducerNode(
config_from_string(config, portnumfile="introducer.port"),
config_from_string(config, "introducer.port", "no-basedir"),
)
self.ws = self.node.getServiceNamed("webish")
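The IntroducerWeb hunk shows the same shift on the node side: config_from_string() is now called with the port-number file and a base directory as positional arguments instead of the old portnumfile= keyword, with "no-basedir" apparently serving as a placeholder when no real directory exists. A hedged sketch of the call shape used here (the config text itself is illustrative):

from allmydata.node import config_from_string

config = config_from_string(
    "[node]\nweb.port = tcp:0\n",   # illustrative config contents
    "introducer.port",              # port-number file, formerly portnumfile=
    "no-basedir",                   # placeholder base directory
)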
@ -124,9 +124,8 @@ def create_config(reactor, cli_config):
# a nice error, and startService will throw an ugly error.

class Provider(service.MultiService):
def __init__(self, basedir, config, reactor):
def __init__(self, config, reactor):
service.MultiService.__init__(self)
self._basedir = basedir
self._config = config
self._i2p = _import_i2p()
self._txi2p = _import_txi2p()
@ -198,10 +198,9 @@ def create_config(reactor, cli_config):
# nice error, and startService will throw an ugly error.

class Provider(service.MultiService):
def __init__(self, basedir, node_for_config, reactor):
def __init__(self, config, reactor):
service.MultiService.__init__(self)
self._basedir = basedir
self._node_for_config = node_for_config
self._config = config
self._tor_launched = None
self._onion_ehs = None
self._onion_tor_control_proto = None
@ -210,7 +209,7 @@ class Provider(service.MultiService):
self._reactor = reactor

def _get_tor_config(self, *args, **kwargs):
return self._node_for_config.get_config("tor", *args, **kwargs)
return self._config.get_config("tor", *args, **kwargs)

def get_listener(self):
local_port = int(self._get_tor_config("onion.local_port"))
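After this hunk every [tor] option lookup is delegated to the config object rather than a back-reference to the node. Against the illustrative FakeConfig sketched earlier, the delegation behaves like this (a sketch of the semantics, not a test from the repository):

cfg = FakeConfig(**{"onion.local_port": "8080", "launch": True})

# Provider._get_tor_config("onion.local_port") now amounts to:
assert cfg.get_config("tor", "onion.local_port") == "8080"

# and an absent option falls back to the caller-supplied default:
assert cfg.get_config("tor", "control.port", None) is None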
@ -255,7 +254,7 @@ class Provider(service.MultiService):
# this fires with a tuple of (control_endpoint, tor_protocol)
if not self._tor_launched:
self._tor_launched = OneShotObserverList()
private_dir = os.path.join(self._basedir, "private")
private_dir = self._config.get_config_path("private")
tor_binary = self._get_tor_config("tor.executable", None)
d = _launch_tor(reactor, tor_binary, private_dir, self._txtorcon)
d.addBoth(self._tor_launched.fire)
@ -298,7 +297,7 @@ class Provider(service.MultiService):
external_port = int(self._get_tor_config("onion.external_port"))

fn = self._get_tor_config("onion.private_key_file")
privkeyfile = os.path.join(self._basedir, fn)
privkeyfile = self._config.get_config_path(fn)
with open(privkeyfile, "rb") as f:
privkey = f.read()
ehs = self._txtorcon.EphemeralHiddenService(
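The last two hunks replace the remaining os.path.join(self._basedir, ...) calls with config.get_config_path(). Judging from the test helper at the top of this section and the call sites here, the method presumably reduces to a join against the node's base directory; a minimal sketch (names illustrative, not a verbatim copy of _Config):

import os

class _ConfigPathSketch(object):
    def __init__(self, basedir):
        self._basedir = os.path.abspath(basedir)

    def get_config_path(self, *args):
        # join any number of path components onto the node's base directory
        return os.path.join(self._basedir, *args)

# so, inside the provider:
#   private_dir = self._config.get_config_path("private")   ->  <basedir>/private
#   privkeyfile = self._config.get_config_path(fn)          ->  <basedir>/<onion.private_key_file>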