Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-20 05:28:04 +00:00)

Commit e76092e16c (parent 02d0c4eb92)

Change relative imports to absolute
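Every hunk below applies the same mechanical change: an implicit relative import (old Python 2 style, e.g. "from common import ...") is rewritten as an absolute import rooted at the allmydata package. The following self-contained sketch illustrates the pattern outside of Tahoe-LAFS; the package name demo_pkg, its module common.py, and the constant MODE_CHECK are hypothetical names used only for illustration, not code from this commit.

    # A minimal, runnable sketch of the relative-to-absolute import pattern.
    # "demo_pkg", "common", and MODE_CHECK are hypothetical illustration names.
    import os
    import sys
    import tempfile

    # Build a throwaway package on disk: demo_pkg/__init__.py and demo_pkg/common.py.
    root = tempfile.mkdtemp()
    pkg_dir = os.path.join(root, "demo_pkg")
    os.mkdir(pkg_dir)
    open(os.path.join(pkg_dir, "__init__.py"), "w").close()
    with open(os.path.join(pkg_dir, "common.py"), "w") as f:
        f.write("MODE_CHECK = 'MODE_CHECK'\n")

    sys.path.insert(0, root)

    # Old style (what the commit removes): a module living *inside* demo_pkg could say
    #     from common import MODE_CHECK
    # and Python 2 would resolve "common" against the package directory first.
    # New style (what the commit adds): spell out the full dotted path, which is
    # unambiguous from anywhere on sys.path and is the only form Python 3 accepts.
    from demo_pkg.common import MODE_CHECK

    print(MODE_CHECK)  # prints: MODE_CHECK

Absolute imports name the same module regardless of which file performs the import, so they keep working under "from __future__ import absolute_import" and under Python 3, and a package-local module can no longer silently shadow a top-level module of the same name.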
@@ -39,7 +39,7 @@ finally:

 __version__ = "unknown"
 try:
-    from _version import __version__
+    from allmydata._version import __version__
 except ImportError:
     # We're running in a tree that hasn't run "./setup.py darcsver", and didn't
     # come with a _version.py, so we don't know what our version is. This should
@@ -48,7 +48,7 @@ except ImportError:

 __appname__ = "unknown"
 try:
-    from _appname import __appname__
+    from allmydata._appname import __appname__
 except ImportError:
     # We're running in a tree that hasn't run "./setup.py". This shouldn't happen.
     pass
@@ -58,7 +58,7 @@ except ImportError:
 # http://allmydata.org/trac/tahoe/wiki/Versioning
 __full_version__ = __appname__ + '/' + str(__version__)

-import _auto_deps
+from allmydata import _auto_deps
 _auto_deps.require_auto_deps()

 import os, platform, re, subprocess, sys
@@ -265,7 +265,7 @@ class Handler:
         d.addCallback(_got_parent)
         return d

-from auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme
+from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme


 class Dispatcher:
@@ -402,7 +402,7 @@ class SFTPHandler:
 # then you get SFTPHandler(user)
 components.registerAdapter(SFTPHandler, SFTPUser, ISFTPServer)

-from auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme
+from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme

 class Dispatcher:
     implements(portal.IRealm)
@@ -24,9 +24,7 @@ from allmydata.util.assertutil import precondition
 from allmydata.util import hashutil, base32
 from allmydata import uri
 import allmydata
-
-#import amdicon
-import amdlogo
+from allmydata.gui import amdlogo

 from foolscap.api import Tub
 from twisted.python import usage
@@ -18,8 +18,7 @@ import allmydata
 from allmydata import client
 from allmydata.gui.confwiz import ConfWizApp, ACCOUNT_PAGE, DEFAULT_SERVER_URL
 from allmydata.scripts.common import get_aliases
-import amdicon
-import amdlogo
+from allmydata.gui import amdicon, amdlogo

 DEFAULT_FUSE_TIMEOUT = 300

@@ -2,7 +2,7 @@
 # This is for compatibilty with old .tac files, which reference
 # allmydata.introducer.IntroducerNode

-from server import IntroducerNode
+from allmydata.introducer.server import IntroducerNode

 # hush pyflakes
 _unused = [IntroducerNode]
@@ -6,9 +6,9 @@ from allmydata.uri import from_string
 from allmydata.util import hashutil, base32, idlib, log
 from allmydata.check_results import CheckAndRepairResults, CheckResults

-from common import MODE_CHECK, CorruptShareError
-from servermap import ServerMap, ServermapUpdater
-from layout import unpack_share, SIGNED_PREFIX_LENGTH
+from allmydata.mutable.common import MODE_CHECK, CorruptShareError
+from allmydata.mutable.servermap import ServerMap, ServermapUpdater
+from allmydata.mutable.layout import unpack_share, SIGNED_PREFIX_LENGTH

 class MutableChecker:

@@ -12,13 +12,13 @@ from allmydata.uri import WriteableSSKFileURI, ReadonlySSKFileURI
 from allmydata.monitor import Monitor
 from pycryptopp.cipher.aes import AES

-from publish import Publish
-from common import MODE_READ, MODE_WRITE, UnrecoverableFileError, \
+from allmydata.mutable.publish import Publish
+from allmydata.mutable.common import MODE_READ, MODE_WRITE, UnrecoverableFileError, \
      ResponseCache, UncoordinatedWriteError
-from servermap import ServerMap, ServermapUpdater
-from retrieve import Retrieve
-from checker import MutableChecker, MutableCheckAndRepairer
-from repairer import Repairer
+from allmydata.mutable.servermap import ServerMap, ServermapUpdater
+from allmydata.mutable.retrieve import Retrieve
+from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer
+from allmydata.mutable.repairer import Repairer


 class BackoffAgent:
@@ -1,6 +1,6 @@

 import struct
-from common import NeedMoreDataError, UnknownVersionError
+from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError

 PREFIX = ">BQ32s16s" # each version has a different prefix
 SIGNED_PREFIX = ">BQ32s16s BBQQ" # this is covered by the signature
@@ -12,10 +12,10 @@ from allmydata.storage.server import si_b2a
 from pycryptopp.cipher.aes import AES
 from foolscap.api import eventually, fireEventually

-from common import MODE_WRITE, MODE_CHECK, DictOfSets, \
+from allmydata.mutable.common import MODE_WRITE, MODE_CHECK, DictOfSets, \
      UncoordinatedWriteError, NotEnoughServersError
-from servermap import ServerMap
-from layout import pack_prefix, pack_share, unpack_header, pack_checkstring, \
+from allmydata.mutable.servermap import ServerMap
+from allmydata.mutable.layout import pack_prefix, pack_share, unpack_header, pack_checkstring, \
      unpack_checkstring, SIGNED_PREFIX

 class PublishStatus:
@@ -12,8 +12,8 @@ from allmydata.storage.server import si_b2a
 from pycryptopp.cipher.aes import AES
 from pycryptopp.publickey import rsa

-from common import DictOfSets, CorruptShareError, UncoordinatedWriteError
-from layout import SIGNED_PREFIX, unpack_share_data
+from allmydata.mutable.common import DictOfSets, CorruptShareError, UncoordinatedWriteError
+from allmydata.mutable.layout import SIGNED_PREFIX, unpack_share_data

 class RetrieveStatus:
     implements(IRetrieveStatus)
@@ -10,9 +10,9 @@ from allmydata.storage.server import si_b2a
 from allmydata.interfaces import IServermapUpdaterStatus
 from pycryptopp.publickey import rsa

-from common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, MODE_READ, \
+from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, MODE_READ, \
      DictOfSets, CorruptShareError, NeedMoreDataError
-from layout import unpack_prefix_and_signature, unpack_header, unpack_share, \
+from allmydata.mutable.layout import unpack_prefix_and_signature, unpack_header, unpack_share, \
      SIGNED_PREFIX_LENGTH

 class UpdateStatus:
@@ -771,7 +771,8 @@ class ConsolidateOptions(VDriveOptions):
         self.where = where

 def consolidate(options):
-    from consolidate import main; return main(options)
+    from allmydata.scripts.consolidate import main
+    return main(options)


 class DebugCommand(usage.Options):
@@ -8,7 +8,7 @@ from twisted.python import usage

 pkg_resources.require('allmydata-tahoe')
 from allmydata.scripts.common import BaseOptions
-import debug, create_node, startstop_node, cli, keygen, stats_gatherer
+from allmydata.scripts import debug, create_node, startstop_node, cli, keygen, stats_gatherer

 def GROUP(s):
     # Usage.parseOptions compares argv[1] against command[0], so it will
@@ -1,7 +1,7 @@
 import time, os, pickle, struct
-from crawler import ShareCrawler
-from shares import get_share_file
-from common import UnknownMutableContainerVersionError, \
+from allmydata.storage.crawler import ShareCrawler
+from allmydata.storage.shares import get_share_file
+from allmydata.storage.common import UnknownMutableContainerVersionError, \
      UnknownImmutableContainerVersionError
 from twisted.python import log as twlog

@@ -1,7 +1,7 @@
 #! /usr/bin/python

-from mutable import MutableShareFile
-from immutable import ShareFile
+from allmydata.storage.mutable import MutableShareFile
+from allmydata.storage.immutable import ShareFile

 def get_share_file(filename):
     f = open(filename, "rb")
@@ -21,7 +21,7 @@ from allmydata.util.assertutil import precondition
 from allmydata.util.consumer import download_to_data
 from allmydata.stats import StatsGathererService
 from allmydata.key_generator import KeyGeneratorService
-import common_util as testutil
+import allmydata.test.common_util as testutil
 from allmydata import immutable


@@ -7,7 +7,7 @@ from allmydata.storage_client import StorageFarmBroker, NativeStorageClientDescr
 from allmydata.monitor import Monitor
 from allmydata.test.no_network import GridTestMixin
 from allmydata.immutable.upload import Data
-from common_web import WebRenderingMixin
+from allmydata.test.common_web import WebRenderingMixin

 class FakeClient:
     def get_storage_broker(self):
@@ -12,7 +12,7 @@ from allmydata.util import base32, fileutil
 from allmydata.interfaces import IFilesystemNode, IFileNode, \
      IImmutableFileNode, IMutableFileNode, IDirectoryNode
 from foolscap.api import flushEventualQueue
-import common_util as testutil
+import allmydata.test.common_util as testutil

 class FakeIntroducerClient(IntroducerClient):
     def __init__(self):
@@ -10,8 +10,8 @@ from allmydata.util import fileutil, hashutil, pollmixin
 from allmydata.storage.server import StorageServer, si_b2a
 from allmydata.storage.crawler import ShareCrawler, TimeSliceExceeded

-from test_storage import FakeCanary
-from common_util import StallMixin
+from allmydata.test.test_storage import FakeCanary
+from allmydata.test.common_util import StallMixin

 class BucketEnumeratingCrawler(ShareCrawler):
     cpu_slice = 500 # make sure it can complete in a single slice
@@ -22,7 +22,7 @@ from allmydata.test.no_network import GridTestMixin
 from allmydata.unknown import UnknownNode, strip_prefix_for_ro
 from allmydata.nodemaker import NodeMaker
 from base64 import b32decode
-import common_util as testutil
+import allmydata.test.common_util as testutil

 class MemAccum:
     implements(IConsumer)
@@ -11,7 +11,7 @@ from allmydata.util.consumer import MemoryConsumer
 from allmydata.interfaces import IStorageBucketWriter, IStorageBucketReader, \
      NotEnoughSharesError, IStorageBroker, UploadUnhappinessError
 from allmydata.monitor import Monitor
-import common_util as testutil
+import allmydata.test.common_util as testutil

 class LostPeerError(Exception):
     pass
@@ -14,7 +14,7 @@ from allmydata.introducer.server import IntroducerService
 # test compatibility with old introducer .tac files
 from allmydata.introducer import IntroducerNode
 from allmydata.util import pollmixin
-import common_util as testutil
+import allmydata.test.common_util as testutil

 class LoggingMultiService(service.MultiService):
     def log(self, msg, **kw):
@@ -2,7 +2,7 @@
 import re
 from twisted.trial import unittest
 from allmydata.util import iputil
-import common_util as testutil
+import allmydata.test.common_util as testutil

 DOTTED_QUAD_RE=re.compile("^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")

@@ -28,7 +28,7 @@ from allmydata.mutable.servermap import ServerMap, ServermapUpdater
 from allmydata.mutable.layout import unpack_header, unpack_share
 from allmydata.mutable.repairer import MustForceRepairError

-import common_util as testutil
+import allmydata.test.common_util as testutil

 # this "FakeStorage" exists to put the share data in RAM and avoid using real
 # network connections, both to speed up the tests and to reduce the amount of
@@ -8,7 +8,7 @@ from foolscap.api import flushEventualQueue
 from twisted.application import service
 from allmydata.node import Node, formatTimeTahoeStyle
 from allmydata.util import fileutil
-import common_util as testutil
+import allmydata.test.common_util as testutil

 class LoggingMultiService(service.MultiService):
     def log(self, msg, **kw):
@@ -8,7 +8,7 @@ from allmydata.util.consumer import download_to_data
 from twisted.internet import defer
 from twisted.trial import unittest
 import random
-from no_network import GridTestMixin
+from allmydata.test.no_network import GridTestMixin

 # We'll allow you to pass this test even if you trigger eighteen times as
 # many disk reads and block fetches as would be optimal.
@@ -3,7 +3,7 @@ from twisted.trial import unittest
 from twisted.application import service
 from allmydata.stats import CPUUsageMonitor
 from allmydata.util import pollmixin
-import common_util as testutil
+import allmydata.test.common_util as testutil

 class FasterMonitor(CPUUsageMonitor):
     POLL_INTERVAL = 0.1
@@ -13,8 +13,6 @@ from allmydata.immutable import upload, encode
 from allmydata.interfaces import FileTooLargeError, UploadUnhappinessError
 from allmydata.util.assertutil import precondition
 from allmydata.util.deferredutil import DeferredListShouldSucceed
 from allmydata.util.happinessutil import servers_of_happiness, \
      shares_by_server, merge_peers
-from no_network import GridTestMixin
-from common_util import ShouldFailMixin
 from allmydata.storage_client import StorageFarmBroker
@@ -24,7 +24,7 @@ from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
      create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
 from allmydata.interfaces import IMutableFileNode
 from allmydata.mutable import servermap, publish, retrieve
-import common_util as testutil
+import allmydata.test.common_util as testutil
 from allmydata.test.no_network import GridTestMixin
 from allmydata.test.common_web import HTTPClientGETFactory, \
      HTTPClientHEADFactory
@@ -2,7 +2,7 @@
 Tests useful in assertion checking, prints out nicely formated messages too.
 """

-from humanreadable import hr
+from allmydata.util.humanreadable import hr

 def _assert(___cond=False, *___args, **___kwargs):
     if ___cond:
@@ -1,7 +1,7 @@
 # from the Python Standard Library
 import string

-from assertutil import precondition
+from allmydata.util.assertutil import precondition

 z_base_32_alphabet = "ybndrfg8ejkmcpqxot1uwisza345h769" # Zooko's choice, rationale in "DESIGN" doc
 rfc3548_alphabet = "abcdefghijklmnopqrstuvwxyz234567" # RFC3548 standard used by Gnutella, Content-Addressable Web, THEX, Bitzi, Web-Calculus...
@@ -5,7 +5,7 @@ Tools to mess with dicts.
 import copy, operator
 from bisect import bisect_left, insort_left

-from assertutil import _assert, precondition
+from allmydata.util.assertutil import _assert, precondition

 def move(k, d1, d2, strict=False):
     """
@@ -9,8 +9,7 @@ from twisted.internet.utils import getProcessOutput
 from twisted.python.procutils import which
 from twisted.python import log

-# from allmydata.util
-import observer
+from allmydata.util import observer

 try:
     import resource
@@ -1,4 +1,4 @@
-import nummedobj
+from allmydata.util import nummedobj

 from foolscap.logging import log
 from twisted.python import log as tw_log
@@ -1,4 +1,4 @@
-import dictutil
+from allmydata.util import dictutil

 class NummedObj(object):
     """
@@ -35,7 +35,7 @@ def force_repeatability():
     time.faketime = faketime
     time.time = faketime

-from idlib import i2b
+from allmydata.util.idlib import i2b
 def fakeurandom(n):
     if n > 20:
         z = i2b(random.getrandbits(20*8))
@@ -6,7 +6,7 @@
 # Transitive Grace Period Public License, version 1 or later.

 from __future__ import division
-from mathutil import round_sigfigs
+from allmydata.util.mathutil import round_sigfigs
 import math
 import sys
@@ -7,7 +7,7 @@ import allmydata
 import simplejson
 from allmydata import get_package_versions_string
 from allmydata.util import idlib
-from common import getxmlfile, get_arg
+from allmydata.web.common import getxmlfile, get_arg

 class IntroducerRoot(rend.Page):