__version__ = "unknown"
try:
- from _version import __version__
+ from allmydata._version import __version__
except ImportError:
# We're running in a tree that hasn't run "./setup.py darcsver", and didn't
# come with a _version.py, so we don't know what our version is. This should
__appname__ = "unknown"
try:
- from _appname import __appname__
+ from allmydata._appname import __appname__
except ImportError:
# We're running in a tree that hasn't run "./setup.py". This shouldn't happen.
pass
# http://allmydata.org/trac/tahoe/wiki/Versioning
__full_version__ = __appname__ + '/' + str(__version__)
-import _auto_deps
+from allmydata import _auto_deps
_auto_deps.require_auto_deps()
import os, platform, re, subprocess, sys
d.addCallback(_got_parent)
return d
-from auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme
+from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme
class Dispatcher:
# then you get SFTPHandler(user)
components.registerAdapter(SFTPHandler, SFTPUser, ISFTPServer)
-from auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme
+from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme
class Dispatcher:
implements(portal.IRealm)
from allmydata.util import hashutil, base32
from allmydata import uri
import allmydata
-
-#import amdicon
-import amdlogo
+from allmydata.gui import amdlogo
from foolscap.api import Tub
from twisted.python import usage
from allmydata import client
from allmydata.gui.confwiz import ConfWizApp, ACCOUNT_PAGE, DEFAULT_SERVER_URL
from allmydata.scripts.common import get_aliases
-import amdicon
-import amdlogo
+from allmydata.gui import amdicon, amdlogo
DEFAULT_FUSE_TIMEOUT = 300
# This is for compatibility with old .tac files, which reference
# allmydata.introducer.IntroducerNode
-from server import IntroducerNode
+from allmydata.introducer.server import IntroducerNode
# hush pyflakes
_unused = [IntroducerNode]
from allmydata.util import hashutil, base32, idlib, log
from allmydata.check_results import CheckAndRepairResults, CheckResults
-from common import MODE_CHECK, CorruptShareError
-from servermap import ServerMap, ServermapUpdater
-from layout import unpack_share, SIGNED_PREFIX_LENGTH
+from allmydata.mutable.common import MODE_CHECK, CorruptShareError
+from allmydata.mutable.servermap import ServerMap, ServermapUpdater
+from allmydata.mutable.layout import unpack_share, SIGNED_PREFIX_LENGTH
class MutableChecker:
from allmydata.monitor import Monitor
from pycryptopp.cipher.aes import AES
-from publish import Publish
-from common import MODE_READ, MODE_WRITE, UnrecoverableFileError, \
+from allmydata.mutable.publish import Publish
+from allmydata.mutable.common import MODE_READ, MODE_WRITE, UnrecoverableFileError, \
ResponseCache, UncoordinatedWriteError
-from servermap import ServerMap, ServermapUpdater
-from retrieve import Retrieve
-from checker import MutableChecker, MutableCheckAndRepairer
-from repairer import Repairer
+from allmydata.mutable.servermap import ServerMap, ServermapUpdater
+from allmydata.mutable.retrieve import Retrieve
+from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer
+from allmydata.mutable.repairer import Repairer
class BackoffAgent:
import struct
-from common import NeedMoreDataError, UnknownVersionError
+from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError
PREFIX = ">BQ32s16s" # each version has a different prefix
SIGNED_PREFIX = ">BQ32s16s BBQQ" # this is covered by the signature
from pycryptopp.cipher.aes import AES
from foolscap.api import eventually, fireEventually
-from common import MODE_WRITE, MODE_CHECK, DictOfSets, \
+from allmydata.mutable.common import MODE_WRITE, MODE_CHECK, DictOfSets, \
UncoordinatedWriteError, NotEnoughServersError
-from servermap import ServerMap
-from layout import pack_prefix, pack_share, unpack_header, pack_checkstring, \
+from allmydata.mutable.servermap import ServerMap
+from allmydata.mutable.layout import pack_prefix, pack_share, unpack_header, pack_checkstring, \
unpack_checkstring, SIGNED_PREFIX
class PublishStatus:
from pycryptopp.cipher.aes import AES
from pycryptopp.publickey import rsa
-from common import DictOfSets, CorruptShareError, UncoordinatedWriteError
-from layout import SIGNED_PREFIX, unpack_share_data
+from allmydata.mutable.common import DictOfSets, CorruptShareError, UncoordinatedWriteError
+from allmydata.mutable.layout import SIGNED_PREFIX, unpack_share_data
class RetrieveStatus:
implements(IRetrieveStatus)
from allmydata.interfaces import IServermapUpdaterStatus
from pycryptopp.publickey import rsa
-from common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, MODE_READ, \
+from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, MODE_READ, \
DictOfSets, CorruptShareError, NeedMoreDataError
-from layout import unpack_prefix_and_signature, unpack_header, unpack_share, \
+from allmydata.mutable.layout import unpack_prefix_and_signature, unpack_header, unpack_share, \
SIGNED_PREFIX_LENGTH
class UpdateStatus:
self.where = where
def consolidate(options):
- from consolidate import main; return main(options)
+ from allmydata.scripts.consolidate import main
+ return main(options)
class DebugCommand(usage.Options):
pkg_resources.require('allmydata-tahoe')
from allmydata.scripts.common import BaseOptions
-import debug, create_node, startstop_node, cli, keygen, stats_gatherer
+from allmydata.scripts import debug, create_node, startstop_node, cli, keygen, stats_gatherer
def GROUP(s):
# Usage.parseOptions compares argv[1] against command[0], so it will
import time, os, pickle, struct
-from crawler import ShareCrawler
-from shares import get_share_file
-from common import UnknownMutableContainerVersionError, \
+from allmydata.storage.crawler import ShareCrawler
+from allmydata.storage.shares import get_share_file
+from allmydata.storage.common import UnknownMutableContainerVersionError, \
UnknownImmutableContainerVersionError
from twisted.python import log as twlog
#! /usr/bin/python
-from mutable import MutableShareFile
-from immutable import ShareFile
+from allmydata.storage.mutable import MutableShareFile
+from allmydata.storage.immutable import ShareFile
def get_share_file(filename):
f = open(filename, "rb")
from allmydata.util.consumer import download_to_data
from allmydata.stats import StatsGathererService
from allmydata.key_generator import KeyGeneratorService
-import common_util as testutil
+import allmydata.test.common_util as testutil
from allmydata import immutable
from allmydata.monitor import Monitor
from allmydata.test.no_network import GridTestMixin
from allmydata.immutable.upload import Data
-from common_web import WebRenderingMixin
+from allmydata.test.common_web import WebRenderingMixin
class FakeClient:
def get_storage_broker(self):
from allmydata.interfaces import IFilesystemNode, IFileNode, \
IImmutableFileNode, IMutableFileNode, IDirectoryNode
from foolscap.api import flushEventualQueue
-import common_util as testutil
+import allmydata.test.common_util as testutil
class FakeIntroducerClient(IntroducerClient):
def __init__(self):
from allmydata.storage.server import StorageServer, si_b2a
from allmydata.storage.crawler import ShareCrawler, TimeSliceExceeded
-from test_storage import FakeCanary
-from common_util import StallMixin
+from allmydata.test.test_storage import FakeCanary
+from allmydata.test.common_util import StallMixin
class BucketEnumeratingCrawler(ShareCrawler):
cpu_slice = 500 # make sure it can complete in a single slice
from allmydata.unknown import UnknownNode, strip_prefix_for_ro
from allmydata.nodemaker import NodeMaker
from base64 import b32decode
-import common_util as testutil
+import allmydata.test.common_util as testutil
class MemAccum:
implements(IConsumer)
from allmydata.interfaces import IStorageBucketWriter, IStorageBucketReader, \
NotEnoughSharesError, IStorageBroker, UploadUnhappinessError
from allmydata.monitor import Monitor
-import common_util as testutil
+import allmydata.test.common_util as testutil
class LostPeerError(Exception):
pass
# test compatibility with old introducer .tac files
from allmydata.introducer import IntroducerNode
from allmydata.util import pollmixin
-import common_util as testutil
+import allmydata.test.common_util as testutil
class LoggingMultiService(service.MultiService):
def log(self, msg, **kw):
import re
from twisted.trial import unittest
from allmydata.util import iputil
-import common_util as testutil
+import allmydata.test.common_util as testutil
DOTTED_QUAD_RE=re.compile("^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")
from allmydata.mutable.layout import unpack_header, unpack_share
from allmydata.mutable.repairer import MustForceRepairError
-import common_util as testutil
+import allmydata.test.common_util as testutil
# this "FakeStorage" exists to put the share data in RAM and avoid using real
# network connections, both to speed up the tests and to reduce the amount of
from twisted.application import service
from allmydata.node import Node, formatTimeTahoeStyle
from allmydata.util import fileutil
-import common_util as testutil
+import allmydata.test.common_util as testutil
class LoggingMultiService(service.MultiService):
def log(self, msg, **kw):
from twisted.internet import defer
from twisted.trial import unittest
import random
-from no_network import GridTestMixin
+from allmydata.test.no_network import GridTestMixin
# We'll allow you to pass this test even if you trigger eighteen times as
# many disk reads and block fetches as would be optimal.
from twisted.application import service
from allmydata.stats import CPUUsageMonitor
from allmydata.util import pollmixin
-import common_util as testutil
+import allmydata.test.common_util as testutil
class FasterMonitor(CPUUsageMonitor):
POLL_INTERVAL = 0.1
from allmydata.interfaces import FileTooLargeError, UploadUnhappinessError
from allmydata.util.assertutil import precondition
from allmydata.util.deferredutil import DeferredListShouldSucceed
from allmydata.util.happinessutil import servers_of_happiness, \
     shares_by_server, merge_peers
-from no_network import GridTestMixin
-from common_util import ShouldFailMixin
+from allmydata.test.no_network import GridTestMixin
+from allmydata.test.common_util import ShouldFailMixin
from allmydata.storage_client import StorageFarmBroker
create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
from allmydata.interfaces import IMutableFileNode
from allmydata.mutable import servermap, publish, retrieve
-import common_util as testutil
+import allmydata.test.common_util as testutil
from allmydata.test.no_network import GridTestMixin
from allmydata.test.common_web import HTTPClientGETFactory, \
HTTPClientHEADFactory
Tests useful in assertion checking, prints out nicely formated messages too.
"""
-from humanreadable import hr
+from allmydata.util.humanreadable import hr
def _assert(___cond=False, *___args, **___kwargs):
if ___cond:
# from the Python Standard Library
import string
-from assertutil import precondition
+from allmydata.util.assertutil import precondition
z_base_32_alphabet = "ybndrfg8ejkmcpqxot1uwisza345h769" # Zooko's choice, rationale in "DESIGN" doc
rfc3548_alphabet = "abcdefghijklmnopqrstuvwxyz234567" # RFC3548 standard used by Gnutella, Content-Addressable Web, THEX, Bitzi, Web-Calculus...
import copy, operator
from bisect import bisect_left, insort_left
-from assertutil import _assert, precondition
+from allmydata.util.assertutil import _assert, precondition
def move(k, d1, d2, strict=False):
"""
from twisted.python.procutils import which
from twisted.python import log
-# from allmydata.util
-import observer
+from allmydata.util import observer
try:
import resource
-import nummedobj
+from allmydata.util import nummedobj
from foolscap.logging import log
from twisted.python import log as tw_log
-import dictutil
+from allmydata.util import dictutil
class NummedObj(object):
"""
time.faketime = faketime
time.time = faketime
- from idlib import i2b
+ from allmydata.util.idlib import i2b
def fakeurandom(n):
if n > 20:
z = i2b(random.getrandbits(20*8))
# Transitive Grace Period Public License, version 1 or later.
from __future__ import division
-from mathutil import round_sigfigs
+from allmydata.util.mathutil import round_sigfigs
import math
import sys
import simplejson
from allmydata import get_package_versions_string
from allmydata.util import idlib
-from common import getxmlfile, get_arg
+from allmydata.web.common import getxmlfile, get_arg
class IntroducerRoot(rend.Page):