From: Brian Warner
Date: Tue, 3 Jun 2008 07:02:10 +0000 (-0700)
Subject: mutable/publish.py: raise FileTooLargeError instead of an ugly assertion when the...
X-Git-Tag: allmydata-tahoe-1.1.0~54
X-Git-Url: https://git.rkrishnan.org/%5B/%5D%20/uri/%22doc.html/quickstart.html?a=commitdiff_plain;h=f4496bd5533b92ee95c9029d230be929e07102cd;p=tahoe-lafs%2Ftahoe-lafs.git

mutable/publish.py: raise FileTooLargeError instead of an ugly assertion when the SDMF restrictions are exceeded
---

diff --git a/src/allmydata/mutable/publish.py b/src/allmydata/mutable/publish.py
index 1d313918..a8c28003 100644
--- a/src/allmydata/mutable/publish.py
+++ b/src/allmydata/mutable/publish.py
@@ -5,7 +5,7 @@ from itertools import count
 from zope.interface import implements
 from twisted.internet import defer
 from twisted.python import failure
-from allmydata.interfaces import IPublishStatus
+from allmydata.interfaces import IPublishStatus, FileTooLargeError
 from allmydata.util import base32, hashutil, mathutil, idlib, log
 from allmydata import hashtree, codec, storage
 from pycryptopp.cipher.aes import AES
@@ -136,6 +136,10 @@ class Publish:
         # 5: when enough responses are back, we're done
 
         self.log("starting publish, datalen is %s" % len(newdata))
+        if len(newdata) > self.MAX_SEGMENT_SIZE:
+            raise FileTooLargeError("SDMF is limited to one segment, and "
+                                    "%d > %d" % (len(newdata),
+                                                 self.MAX_SEGMENT_SIZE))
         self._status.set_size(len(newdata))
         self._status.set_status("Started")
         self._started = time.time()
diff --git a/src/allmydata/test/test_mutable.py b/src/allmydata/test/test_mutable.py
index ac045140..49b4c629 100644
--- a/src/allmydata/test/test_mutable.py
+++ b/src/allmydata/test/test_mutable.py
@@ -10,7 +10,8 @@ from allmydata.util.idlib import shortnodeid_b2a
 from allmydata.util.hashutil import tagged_hash
 from allmydata.util.fileutil import make_dirs
 from allmydata.encode import NotEnoughSharesError
-from allmydata.interfaces import IURI, IMutableFileURI, IUploadable
+from allmydata.interfaces import IURI, IMutableFileURI, IUploadable, \
+     FileTooLargeError
 from foolscap.eventual import eventually, fireEventually
 from foolscap.logging import log
 import sha
@@ -339,6 +340,21 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         d.addCallback(_created)
         return d
 
+    def test_create_with_too_large_contents(self):
+        BIG = "a" * (Publish.MAX_SEGMENT_SIZE+1)
+        d = self.shouldFail(FileTooLargeError, "too_large",
+                            "SDMF is limited to one segment, and %d > %d" %
+                            (len(BIG), Publish.MAX_SEGMENT_SIZE),
+                            self.client.create_mutable_file, BIG)
+        d.addCallback(lambda res: self.client.create_mutable_file("small"))
+        def _created(n):
+            return self.shouldFail(FileTooLargeError, "too_large_2",
+                                   "SDMF is limited to one segment, and %d > %d" %
+                                   (len(BIG), Publish.MAX_SEGMENT_SIZE),
+                                   n.overwrite, BIG)
+        d.addCallback(_created)
+        return d
+
     def failUnlessCurrentSeqnumIs(self, n, expected_seqnum):
         d = n.get_servermap(MODE_READ)
         d.addCallback(lambda servermap: servermap.best_recoverable_version())
@@ -355,6 +371,8 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             return None
         def _error_modifier(old_contents):
             raise ValueError("oops")
+        def _toobig_modifier(old_contents):
+            return "b" * (Publish.MAX_SEGMENT_SIZE+1)
         calls = []
         def _ucw_error_modifier(old_contents):
             # simulate an UncoordinatedWriteError once
@@ -387,6 +405,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2))
 
+            d.addCallback(lambda res:
+                          self.shouldFail(FileTooLargeError, "toobig_modifier",
+                                          "SDMF is limited to one segment",
+                                          n.modify, _toobig_modifier))
+            d.addCallback(lambda res: n.download_best_version())
+            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2))
+
             d.addCallback(lambda res: n.modify(_ucw_error_modifier))
             d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2))
             d.addCallback(lambda res: n.download_best_version())
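A minimal usage sketch (not part of the commit) of what the new behaviour means for calling code: after this patch, publishing SDMF contents larger than Publish.MAX_SEGMENT_SIZE fails with FileTooLargeError rather than an assertion, and the tests above check for it via shouldFail. The 'client' and 'contents' names below are placeholders, not identifiers from the patch; only create_mutable_file() and FileTooLargeError come from the code shown above.

    # Hypothetical caller, Python 2 / Twisted style to match the codebase above.
    from twisted.internet import defer
    from allmydata.interfaces import FileTooLargeError

    def _too_large(f):
        f.trap(FileTooLargeError)   # re-raise anything other than the size error
        print "refusing upload: %s" % (f.value,)

    # maybeDeferred handles both a synchronous raise and a failure delivered
    # through the Deferred returned by create_mutable_file().
    d = defer.maybeDeferred(client.create_mutable_file, contents)
    d.addErrback(_too_large)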