from zope.interface import implements
from allmydata.interfaces import RIStorageServer, RIBucketWriter, \
RIBucketReader, IStorageBucketWriter, IStorageBucketReader, HASH_SIZE, \
- BadWriteEnablerError, IStatsProducer
+ BadWriteEnablerError, IStatsProducer, FileTooLargeError
from allmydata.util import base32, fileutil, idlib, mathutil, log
from allmydata.util.assertutil import precondition, _assert
import allmydata # for __version__
uri_extension_starts_at = wbp._offsets['uri_extension']
return uri_extension_starts_at + 4 + uri_extension_size
-class FileTooLargeError(Exception):
- pass
-
class WriteBucketProxy:
implements(IStorageBucketWriter)
def __init__(self, rref, data_size, segment_size, num_segments,
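The hunks above are from the storage module; the remaining hunks, starting with the imports just below, are from the upload tests. Note that FileTooLargeError is not deleted by this patch, only relocated: both import blocks now pull it from allmydata.interfaces, so a counterpart hunk (not shown in this excerpt) presumably adds the same two-line definition there, along these lines:

    # Presumed new home of the exception, in allmydata/interfaces.py;
    # the hunk adding it is not part of this excerpt.
    class FileTooLargeError(Exception):
        pass

Defining it next to the interfaces lets upload-side code raise and catch the exception without importing the storage module, which is exactly what the test changes below take advantage of.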
from twisted.internet import defer
from cStringIO import StringIO
-from allmydata import upload, encode, uri, storage
-from allmydata.interfaces import IFileURI
+from allmydata import upload, encode, uri
+from allmydata.interfaces import IFileURI, FileTooLargeError
from allmydata.util.assertutil import precondition
from allmydata.util.deferredutil import DeferredListShouldSucceed
from allmydata.util.testutil import ShouldFailMixin
k = 3; happy = 7; n = 10
self.set_encoding_parameters(k, happy, n)
data1 = GiganticUploadable(k*4*1024*1024*1024)
- d = self.shouldFail(storage.FileTooLargeError, "test_too_large-data1",
+ d = self.shouldFail(FileTooLargeError, "test_too_large-data1",
"This file is too large to be uploaded (data_size)",
self.u.upload, data1)
data2 = GiganticUploadable(k*4*1024*1024*1024-3)
d.addCallback(lambda res:
- self.shouldFail(storage.FileTooLargeError,
+ self.shouldFail(FileTooLargeError,
"test_too_large-data2",
"This file is too large to be uploaded (offsets)",
self.u.upload, data2))
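A note on the sizes this test picks: share offsets and lengths appear to be packed into 4-byte fields (the literal 4 in "return uri_extension_starts_at + 4 + uri_extension_size" above is one such field, the length prefix of the URI extension), so a single share must stay below 2**32 bytes. With k = 3, a file of k*4*1024*1024*1024 bytes gives each share a data portion of exactly 2**32 bytes, which trips the "(data_size)" failure; shaving off 3 bytes brings the per-share data just under 2**32, but the hash-tree and URI-extension offsets that follow it still overflow, producing the "(offsets)" failure. A rough sketch of the checks these assertions exercise, assuming they live in WriteBucketProxy.__init__ (this excerpt does not show them):

    # Sketch only -- assumed shape of the size checks, not a hunk from this patch.
    if data_size >= 2**32:
        # the per-share data alone no longer fits a 4-byte offset/length field
        raise FileTooLargeError("This file is too large to be uploaded (data_size)")
    # ... block-hash, share-hash, and uri_extension offsets computed here ...
    if offsets['uri_extension'] >= 2**32:
        # the data fits, but the trailing sections push an offset past 4 bytes
        raise FileTooLargeError("This file is too large to be uploaded (offsets)")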