Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-23 23:02:25 +00:00)
Change uses of os.path.expanduser and os.path.abspath. refs #2235
Signed-off-by: Daira Hopwood <daira@jacaranda.org>
This commit is contained in:
parent 4a0cdce86b
commit 14f783086f
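Note on the pattern used throughout this diff: calls to os.path.expanduser / os.path.abspath are replaced by allmydata.util.fileutil.abspath_expanduser_unicode, which takes a unicode path and an optional base= argument, so that relative paths taken from configuration resolve against the node's basedir rather than against whatever the process's current working directory happens to be. A rough sketch of that behaviour follows (Python 2, as used by the codebase at the time); it is illustrative only, not the actual fileutil implementation, and the _sketch names are made up:

    # Illustrative sketch only -- not the real allmydata.util.fileutil code.
    import os

    def abspath_expanduser_unicode_sketch(path, base=None):
        # the new convention: paths are unicode throughout
        assert isinstance(path, unicode), path
        path = os.path.expanduser(path)
        if not os.path.isabs(path):
            # resolve relative paths against an explicit base (e.g. the node's
            # basedir) instead of the current working directory
            path = os.path.join(base if base is not None else os.getcwdu(), path)
        return os.path.normpath(path)

    # Old pattern (removed throughout this commit):
    #     staticdir = os.path.expanduser(staticdir)
    # New pattern:
    #     staticdir = abspath_expanduser_unicode(staticdir_config, base=self.basedir)

Making the base directory explicit keeps path handling independent of where the daemon was started.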
@@ -16,6 +16,7 @@ from allmydata.control import ControlServer
 from allmydata.introducer.client import IntroducerClient
 from allmydata.util import hashutil, base32, pollmixin, log, keyutil, idlib
 from allmydata.util.encodingutil import get_filesystem_encoding
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.abbreviate import parse_abbreviated_size
 from allmydata.util.time_format import parse_duration, parse_date
 from allmydata.stats import StatsProvider
@@ -450,8 +451,8 @@ class Client(node.Node, pollmixin.PollMixin):

         from allmydata.webish import WebishServer
         nodeurl_path = os.path.join(self.basedir, "node.url")
-        staticdir = self.get_config("node", "web.static", "public_html")
-        staticdir = os.path.expanduser(staticdir)
+        staticdir_config = self.get_config("node", "web.static", "public_html").decode("utf-8")
+        staticdir = abspath_expanduser_unicode(staticdir_config, base=self.basedir)
         ws = WebishServer(self, webport, nodeurl_path, staticdir)
         self.add_service(ws)

@@ -1,4 +1,5 @@
 import os
+
 from zope.interface import implements
 from twisted.web.client import getPage
 from twisted.internet import defer
@@ -7,6 +8,8 @@ from twisted.conch import error as conch_error
 from twisted.conch.ssh import keys

 from allmydata.util import base32
+from allmydata.util.fileutil import abspath_expanduser_unicode
+

 class NeedRootcapLookupScheme(Exception):
     """Accountname+Password-based access schemes require some kind of
@@ -28,7 +31,7 @@ class AccountFileChecker:
         self.passwords = {}
         self.pubkeys = {}
         self.rootcaps = {}
-        for line in open(os.path.expanduser(accountfile), "r"):
+        for line in open(abspath_expanduser_unicode(accountfile), "r"):
             line = line.strip()
             if line.startswith("#") or not line:
                 continue
@@ -1,5 +1,5 @@

-import os, sys
+import sys

 from twisted.internet import defer
 from twisted.python.filepath import FilePath
@@ -9,6 +9,7 @@ from foolscap.api import eventually
 from allmydata.interfaces import IDirectoryNode

 from allmydata.util.encodingutil import quote_output, get_filesystem_encoding
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.immutable.upload import FileName

@@ -19,7 +20,7 @@ class DropUploader(service.MultiService):
         service.MultiService.__init__(self)

         try:
-            local_dir_u = os.path.expanduser(local_dir_utf8.decode('utf-8'))
+            local_dir_u = abspath_expanduser_unicode(local_dir_utf8.decode('utf-8'))
             if sys.platform == "win32":
                 local_dir = local_dir_u
             else:
@@ -6,7 +6,7 @@ from foolscap.api import Referenceable
 import allmydata
 from allmydata import node
 from allmydata.util import log, rrefutil
-from allmydata.util.encodingutil import get_filesystem_encoding
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.introducer.interfaces import \
      RIIntroducerPublisherAndSubscriberService_v2
 from allmydata.introducer.common import convert_announcement_v1_to_v2, \
@@ -21,7 +21,7 @@ class IntroducerNode(node.Node):
     NODETYPE = "introducer"
     GENERATED_FILES = ['introducer.furl']

-    def __init__(self, basedir="."):
+    def __init__(self, basedir=u"."):
         node.Node.__init__(self, basedir)
         self.read_config()
         self.init_introducer()
@@ -33,8 +33,8 @@ class IntroducerNode(node.Node):
         introducerservice = IntroducerService(self.basedir)
         self.add_service(introducerservice)

-        old_public_fn = os.path.join(self.basedir, "introducer.furl").encode(get_filesystem_encoding())
-        private_fn = os.path.join(self.basedir, "private", "introducer.furl").encode(get_filesystem_encoding())
+        old_public_fn = os.path.join(self.basedir, u"introducer.furl")
+        private_fn = os.path.join(self.basedir, u"private", u"introducer.furl")

         if os.path.exists(old_public_fn):
             if os.path.exists(private_fn):
@@ -62,9 +62,9 @@ class IntroducerNode(node.Node):
         self.log("init_web(webport=%s)", args=(webport,), umid="2bUygA")

         from allmydata.webish import IntroducerWebishServer
-        nodeurl_path = os.path.join(self.basedir, "node.url")
-        staticdir = self.get_config("node", "web.static", "public_html")
-        staticdir = os.path.expanduser(staticdir)
+        nodeurl_path = os.path.join(self.basedir, u"node.url")
+        config_staticdir = self.get_config("node", "web.static", "public_html").decode('utf-8')
+        staticdir = abspath_expanduser_unicode(config_staticdir, base=self.basedir)
         ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir)
         self.add_service(ws)

@@ -1,8 +1,8 @@

 # this is adapted from my code in Buildbot -warner

-import os.path
 import binascii, base64
+
 from twisted.python import log
 from twisted.application import service, strports
 from twisted.cred import checkers, portal
@@ -12,6 +12,8 @@ from twisted.internet import protocol

 from zope.interface import implements

+from allmydata.util.fileutil import precondition_abspath
+
 # makeTelnetProtocol and _TelnetRealm are for the TelnetManhole

 class makeTelnetProtocol:
@@ -63,7 +65,8 @@ class AuthorizedKeysChecker(conchc.SSHPublicKeyDatabase):
     """

     def __init__(self, authorized_keys_file):
-        self.authorized_keys_file = os.path.expanduser(authorized_keys_file)
+        precondition_abspath(authorized_keys_file)
+        self.authorized_keys_file = authorized_keys_file

     def checkKey(self, credentials):
         f = open(self.authorized_keys_file)
@@ -244,14 +247,12 @@ class AuthorizedKeysManhole(_BaseManhole):
                      'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
                      simple tcp port.

-        @param keyfile: the name of a file (relative to the buildmaster's
-                        basedir) that contains SSH public keys of authorized
-                        users, one per line. This is the exact same format
-                        as used by sshd in ~/.ssh/authorized_keys .
+        @param keyfile: the path of a file that contains SSH public keys of
+                        authorized users, one per line. This is the exact
+                        same format as used by sshd in ~/.ssh/authorized_keys .
+                        The path should be absolute.
         """

-        # TODO: expanduser this, and make it relative to the buildmaster's
-        # basedir
         self.keyfile = keyfile
         c = AuthorizedKeysChecker(keyfile)
         _BaseManhole.__init__(self, port, c)
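The manhole change above moves path normalization out of AuthorizedKeysChecker: instead of expanding the keyfile path itself, it now requires an already-absolute path (the caller, Node.setup_ssh in the hunk below, does the normalization). A minimal sketch of what a precondition_abspath-style check could look like, assuming it simply rejects non-unicode or non-absolute paths; the real helper in allmydata.util.fileutil may do more (for example platform-specific checks):

    # Hypothetical sketch; the actual precondition_abspath may differ.
    import os

    def precondition_abspath_sketch(path):
        if not isinstance(path, unicode):
            raise AssertionError("not unicode: %r" % (path,))
        if not os.path.isabs(path):
            raise AssertionError("not an absolute path: %r" % (path,))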
@@ -93,12 +93,11 @@ class Node(service.MultiService):
         iputil.increase_rlimits()

     def init_tempdir(self):
-        local_tempdir_utf8 = "tmp" # default is NODEDIR/tmp/
-        tempdir = self.get_config("node", "tempdir", local_tempdir_utf8).decode('utf-8')
-        tempdir = os.path.join(self.basedir, tempdir)
+        tempdir_config = self.get_config("node", "tempdir", "tmp").decode('utf-8')
+        tempdir = abspath_expanduser_unicode(tempdir_config, base=self.basedir)
         if not os.path.exists(tempdir):
             fileutil.make_dirs(tempdir)
-        tempfile.tempdir = abspath_expanduser_unicode(tempdir)
+        tempfile.tempdir = tempdir
         # this should cause twisted.web.http (which uses
         # tempfile.TemporaryFile) to put large request bodies in the given
         # directory. Without this, the default temp dir is usually /tmp/,
@@ -220,11 +219,12 @@ class Node(service.MultiService):
     def setup_ssh(self):
         ssh_port = self.get_config("node", "ssh.port", "")
         if ssh_port:
-            ssh_keyfile = self.get_config("node", "ssh.authorized_keys_file").decode('utf-8')
+            ssh_keyfile_config = self.get_config("node", "ssh.authorized_keys_file").decode('utf-8')
+            ssh_keyfile = abspath_expanduser_unicode(ssh_keyfile_config, base=self.basedir)
             from allmydata import manhole
-            m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile.encode(get_filesystem_encoding()))
+            m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile)
             m.setServiceParent(self)
-            self.log("AuthorizedKeysManhole listening on %s" % ssh_port)
+            self.log("AuthorizedKeysManhole listening on %s" % (ssh_port,))

     def get_app_versions(self):
         # TODO: merge this with allmydata.get_package_versions
@@ -140,15 +140,11 @@ class GetOptions(FilesystemOptions):
         # tahoe get FOO bar       # write to local file
         # tahoe get tahoe:FOO bar # same

+        if arg2 == "-":
+            arg2 = None
+
         self.from_file = argv_to_unicode(arg1)
-
-        if arg2:
-            self.to_file = argv_to_unicode(arg2)
-        else:
-            self.to_file = None
-
-        if self.to_file == "-":
-            self.to_file = None
+        self.to_file = None if arg2 is None else argv_to_abspath(arg2)

     def getSynopsis(self):
         return "Usage: %s [global-opts] get [options] REMOTE_FILE LOCAL_FILE" % (self.command_name,)
@@ -180,17 +176,11 @@ class PutOptions(FilesystemOptions):
     def parseArgs(self, arg1=None, arg2=None):
         # see Examples below

-        if arg1 is not None and arg2 is not None:
-            self.from_file = argv_to_unicode(arg1)
-            self.to_file = argv_to_unicode(arg2)
-        elif arg1 is not None and arg2 is None:
-            self.from_file = argv_to_unicode(arg1) # might be "-"
-            self.to_file = None
-        else:
-            self.from_file = None
-            self.to_file = None
-        if self.from_file == u"-":
-            self.from_file = None
+        if arg1 == "-":
+            arg1 = None
+
+        self.from_file = None if arg1 is None else argv_to_abspath(arg1)
+        self.to_file = None if arg2 is None else argv_to_unicode(arg2)

         if self['format']:
             if self['format'].upper() not in ("SDMF", "MDMF", "CHK"):
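In the two option parsers above, arguments that name local files now go through argv_to_abspath, while remote (tahoe:) arguments keep using argv_to_unicode. A plausible reading of argv_to_abspath, assuming it simply composes the two helpers already used in this commit (the real definition may differ):

    # Assumed composition only; argv_to_unicode and abspath_expanduser_unicode
    # are the helpers that appear elsewhere in this diff.
    from allmydata.util.encodingutil import argv_to_unicode
    from allmydata.util.fileutil import abspath_expanduser_unicode

    def argv_to_abspath_sketch(s):
        # decode a command-line argument, then make it an absolute unicode path
        return abspath_expanduser_unicode(argv_to_unicode(s))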
@@ -1,5 +1,5 @@

-import os, urllib
+import urllib
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
 from allmydata.scripts.common_http import do_http, format_http_error
@@ -26,7 +26,7 @@ def get(options):
     resp = do_http("GET", url)
     if resp.status in (200, 201,):
         if to_file:
-            outf = open(os.path.expanduser(to_file), "wb")
+            outf = open(to_file, "wb")
         else:
             outf = stdout
         while True:
@@ -1,7 +1,7 @@

-import os
 from cStringIO import StringIO
 import urllib
+
 from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
@@ -73,7 +73,7 @@ def put(options):
         url += "?" + "&".join(queryargs)

     if from_file:
-        infileobj = open(os.path.expanduser(from_file), "rb")
+        infileobj = open(from_file, "rb")
     else:
         # do_http() can't use stdin directly: for one thing, we need a
         # Content-Length field. So we currently must copy it.
@@ -11,8 +11,8 @@ from twisted.application.internet import TimerService
 from zope.interface import implements
 from foolscap.api import eventually, DeadReferenceError, Referenceable, Tub

-from allmydata.util import log
-from allmydata.util.encodingutil import quote_output
+from allmydata.util import log, fileutil
+from allmydata.util.encodingutil import quote_local_unicode_path
 from allmydata.interfaces import RIStatsProvider, RIStatsGatherer, IStatsProducer

 class LoadMonitor(service.MultiService):
@@ -246,7 +246,7 @@ class StdOutStatsGatherer(StatsGatherer):
 class PickleStatsGatherer(StdOutStatsGatherer):
     # inherit from StdOutStatsGatherer for connect/disconnect notifications

-    def __init__(self, basedir=".", verbose=True):
+    def __init__(self, basedir=u".", verbose=True):
         self.verbose = verbose
         StatsGatherer.__init__(self, basedir)
         self.picklefile = os.path.join(basedir, "stats.pickle")
@@ -258,7 +258,7 @@ class PickleStatsGatherer(StdOutStatsGatherer):
             except Exception:
                 print ("Error while attempting to load pickle file %s.\n"
                        "You may need to restore this file from a backup, or delete it if no backup is available.\n" %
-                       quote_output(os.path.abspath(self.picklefile)))
+                       quote_local_unicode_path(self.picklefile))
                 raise
             f.close()
         else:
@@ -311,7 +311,7 @@ class StatsGathererService(service.MultiService):
     def save_portnum(self, junk):
         portnum = self.listener.getPortnum()
         portnumfile = os.path.join(self.basedir, 'portnum')
-        open(portnumfile, 'wb').write('%d\n' % (portnum,))
+        fileutil.write(portnumfile, '%d\n' % (portnum,))

     def tub_ready(self, ignored):
         ff = os.path.join(self.basedir, self.furl_file)
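The last stats hunk also swaps a bare open(portnumfile, 'wb').write(...) for fileutil.write, which writes and then closes the file instead of leaving the handle to the garbage collector. A sketch of the kind of helper this implies (illustrative only; the actual fileutil.write may accept extra arguments such as a mode):

    # Illustrative write-and-close helper, not the real fileutil.write.
    def write_sketch(path, data, mode="wb"):
        f = open(path, mode)
        try:
            f.write(data)
        finally:
            f.close()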
@@ -5,6 +5,8 @@ from twisted.conch import error as conch_error
 from twisted.conch.ssh import keys

 from allmydata.frontends import auth
+from allmydata.util.fileutil import abspath_expanduser_unicode
+

 DUMMY_KEY = keys.Key.fromString("""\
 -----BEGIN RSA PRIVATE KEY-----
@@ -37,7 +39,8 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
     def setUp(self):
         self.account_file = filepath.FilePath(self.mktemp())
         self.account_file.setContent(DUMMY_ACCOUNTS)
-        self.checker = auth.AccountFileChecker(None, self.account_file.path)
+        abspath = abspath_expanduser_unicode(unicode(self.account_file.path))
+        self.checker = auth.AccountFileChecker(None, abspath)

     def test_unknown_user(self):
         """
@@ -39,7 +39,7 @@ from twisted.python import usage

 from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
-    quote_output, quote_local_unicode_path, get_io_encoding, get_filesystem_encoding, \
+    quote_output, get_io_encoding, get_filesystem_encoding, \
     unicode_to_output, unicode_to_argv, to_str
 from allmydata.util.fileutil import abspath_expanduser_unicode

@@ -61,7 +61,7 @@ from mock import patch
 import os, sys, locale

 from allmydata.test.common_util import ReallyEqualMixin
-from allmydata.util import encodingutil
+from allmydata.util import encodingutil, fileutil
 from allmydata.util.encodingutil import argv_to_unicode, unicode_to_url, \
     unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
     unicode_platform, listdir_unicode, FilenameEncodingError, get_io_encoding, \
@@ -275,8 +275,8 @@ class StdlibUnicode(unittest.TestCase):
         # to lumiere_nfc (on Mac OS X, it will be the NFD equivalent).
         self.failUnlessIn(lumiere_nfc + ".txt", set([normalize(fname) for fname in filenames]))

-        expanded = os.path.expanduser("~/" + lumiere_nfc)
-        self.failIfIn("~", expanded)
+        expanded = fileutil.expanduser(u"~/" + lumiere_nfc)
+        self.failIfIn(u"~", expanded)
         self.failUnless(expanded.endswith(lumiere_nfc), expanded)

     def test_open_unrepresentable(self):
@@ -1,10 +1,13 @@
 import os, re, base64
 from cStringIO import StringIO
+
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
+
 from allmydata import uri, client
 from allmydata.nodemaker import NodeMaker
 from allmydata.util import base32, consumer, fileutil, mathutil
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.hashutil import tagged_hash, ssk_writekey_hash, \
      ssk_pubkey_fingerprint_hash
 from allmydata.util.consumer import MemoryConsumer
@@ -3110,7 +3113,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
         fso = debug.FindSharesOptions()
         storage_index = base32.b2a(n.get_storage_index())
         fso.si_s = storage_index
-        fso.nodedirs = [unicode(os.path.dirname(os.path.abspath(storedir)))
+        fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(unicode(storedir)))
                         for (i,ss,storedir)
                         in self.iterate_servers()]
         fso.stdout = StringIO()
@@ -263,9 +263,11 @@ def read(path):
     finally:
         rf.close()

-def put_file(pathname, inf):
+def put_file(path, inf):
+    precondition_abspath(path)
+
     # TODO: create temporary file and move into place?
-    outf = open(os.path.expanduser(pathname), "wb")
+    outf = open(path, "wb")
     try:
         while True:
             data = inf.read(32768)
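With the put_file change above, normalization is pushed to callers: put_file now asserts it was handed an absolute path rather than calling expanduser itself. A hedged usage sketch under that contract (save_download is a made-up caller, and the import assumes put_file lives in allmydata.util.fileutil alongside read, as the context lines suggest):

    from allmydata.util.fileutil import abspath_expanduser_unicode, put_file

    def save_download(path_arg, inf, basedir):
        # normalize to an absolute unicode path first; relative values of
        # path_arg resolve against basedir
        path = abspath_expanduser_unicode(path_arg, base=basedir)
        put_file(path, inf)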