Add CorruptStoredShareError as a superclass for all corrupt share errors raised by...
author    Daira Hopwood <daira@jacaranda.org>
          Wed, 9 Apr 2014 00:35:52 +0000 (01:35 +0100)
committer Daira Hopwood <daira@jacaranda.org>
          Tue, 4 Aug 2015 17:20:42 +0000 (18:20 +0100)
Signed-off-by: Daira Hopwood <daira@jacaranda.org>
src/allmydata/storage/backends/cloud/immutable.py
src/allmydata/storage/backends/cloud/mutable.py
src/allmydata/storage/backends/disk/disk_backend.py
src/allmydata/storage/backends/disk/immutable.py
src/allmydata/storage/backends/disk/mutable.py
src/allmydata/storage/common.py
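The effect of the change, per the commit message, is that every corrupt-share condition detected by a storage backend is reported as CorruptStoredShareError (or a subclass) with the share number as the first constructor argument. Below is a minimal caller-side sketch of that convention, assuming the patch is applied; the helper name and loop are illustrative and not part of this commit, only the exception class and its shnum attribute come from the patch:

    from allmydata.storage.common import CorruptStoredShareError

    def load_valid_shares(loader, storage_index, shnums):
        # 'loader' stands in for a backend-specific share constructor (for
        # example get_disk_share in the disk backend below); any corruption
        # it detects is expected to raise CorruptStoredShareError or a subclass.
        shares = {}
        corrupted = set()
        for shnum in shnums:
            try:
                shares[shnum] = loader(storage_index, shnum)
            except CorruptStoredShareError as e:
                # e.shnum identifies the damaged share without parsing the message.
                corrupted.add(e.shnum)
        return shares, corrupted

Because UnknownContainerVersionError and its mutable/immutable subclasses are re-parented under CorruptStoredShareError later in this patch, the single except clause above also catches container-version mismatches, which is what lets disk_backend.py below drop its three-way except.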

diff --git a/src/allmydata/storage/backends/cloud/immutable.py b/src/allmydata/storage/backends/cloud/immutable.py
index cc29298200454345aa2210b1983c719b11745998..45e60bd1dd8aa35f4d9a005cf3c817b76f9caa06 100644 (file)
@@ -10,7 +10,8 @@ from allmydata.interfaces import IShareForReading, IShareForWriting
 
 from allmydata.util.assertutil import precondition, _assert
 from allmydata.util.mathutil import div_ceil
-from allmydata.storage.common import UnknownImmutableContainerVersionError, DataTooLargeError
+from allmydata.storage.common import CorruptStoredShareError, UnknownImmutableContainerVersionError, \
+     DataTooLargeError
 from allmydata.storage.backends.cloud import cloud_common
 from allmydata.storage.backends.cloud.cloud_common import get_chunk_key, \
      BackpressurePipeline, ChunkCache, CloudShareBase, CloudShareReaderMixin
@@ -157,7 +158,7 @@ class ImmutableCloudShareForReading(CloudShareBase, ImmutableCloudShareMixin, Cl
         chunksize = len(first_chunkdata)
         if chunksize < self.HEADER_SIZE:
             msg = "%r had incomplete header (%d bytes)" % (self, chunksize)
-            raise UnknownImmutableContainerVersionError(msg)
+            raise UnknownImmutableContainerVersionError(shnum, msg)
 
         self._total_size = total_size
         self._chunksize = chunksize
@@ -167,19 +168,22 @@ class ImmutableCloudShareForReading(CloudShareBase, ImmutableCloudShareMixin, Cl
         #print "ImmutableCloudShareForReading", total_size, chunksize, self._key
 
         header = first_chunkdata[:self.HEADER_SIZE]
-        (version, unused, num_leases) = struct.unpack(self.HEADER, header)
+        try:
+            (version, unused, num_leases) = struct.unpack(self.HEADER, header)
+        except struct.error, e:
+            raise CorruptStoredShareError(shnum, "invalid immutable share header for shnum %d: %s" % (shnum, e))
 
         if version != 1:
             msg = "%r had version %d but we wanted 1" % (self, version)
-            raise UnknownImmutableContainerVersionError(msg)
+            raise UnknownImmutableContainerVersionError(shnum, msg)
 
         # We cannot write leases in share files, but allow them to be present
         # in case a share file is copied from a disk backend, or in case we
         # need them in future.
         self._data_length = total_size - self.DATA_OFFSET - (num_leases * self.LEASE_SIZE)
 
-        # TODO: raise a better exception.
-        _assert(self._data_length >= 0, data_length=self._data_length)
+        if self._data_length < 0:
+            raise CorruptStoredShareError(shnum, "calculated data length for shnum %d is %d" % (shnum, self._data_length))
 
     # Boilerplate is in CloudShareBase, read implementation is in CloudShareReaderMixin.
     # So nothing to implement here. Yay!
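For context on the struct.error handled in the hunk above: struct.unpack raises it whenever the header buffer is not exactly the expected size, which is how a truncated container file shows up. A self-contained sketch of that wrapping pattern, assuming a '>LLL' header layout (version, unused, num_leases) purely for illustration:

    import struct

    from allmydata.storage.common import CorruptStoredShareError

    HEADER = ">LLL"                        # assumed layout: version, unused, num_leases
    HEADER_SIZE = struct.calcsize(HEADER)  # 12 bytes

    def parse_immutable_header(shnum, header_bytes):
        # struct.unpack requires exactly HEADER_SIZE bytes; a truncated or
        # otherwise malformed header raises struct.error, which is re-raised
        # here with the share number attached for the caller to report.
        try:
            return struct.unpack(HEADER, header_bytes)
        except struct.error as e:
            raise CorruptStoredShareError(shnum, "invalid immutable share header for shnum %d: %s" % (shnum, e))
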
diff --git a/src/allmydata/storage/backends/cloud/mutable.py b/src/allmydata/storage/backends/cloud/mutable.py
index f8ad0969169252611b4a6b4dac176336449bbd18..7ee1af62c20de9d83054d37c4146d28bce7d09e3 100644 (file)
@@ -12,7 +12,8 @@ from allmydata.util import idlib, log
 from allmydata.util.assertutil import precondition, _assert
 from allmydata.util.mathutil import div_ceil
 from allmydata.util.hashutil import timing_safe_compare
-from allmydata.storage.common import UnknownMutableContainerVersionError, DataTooLargeError
+from allmydata.storage.common import CorruptStoredShareError, UnknownMutableContainerVersionError, \
+     DataTooLargeError
 from allmydata.storage.backends.base import testv_compare
 from allmydata.mutable.layout import MUTABLE_MAGIC, MAX_MUTABLE_SHARE_SIZE
 from allmydata.storage.backends.cloud import cloud_common
@@ -73,15 +74,18 @@ class MutableCloudShare(CloudShareBase, CloudShareReaderMixin):
 
         if len(first_chunkdata) < self.HEADER_SIZE:
             msg = "%r had incomplete header (%d bytes)" % (self, len(first_chunkdata))
-            raise UnknownMutableContainerVersionError(msg)
+            raise UnknownMutableContainerVersionError(shnum, msg)
 
         header = first_chunkdata[:self.HEADER_SIZE]
-        (magic, write_enabler_nodeid, real_write_enabler,
-         data_length, extra_lease_offset) = struct.unpack(self.HEADER, header)
+        try:
+            (magic, write_enabler_nodeid, real_write_enabler,
+             data_length, extra_lease_offset) = struct.unpack(self.HEADER, header)
+        except struct.error, e:
+            raise CorruptStoredShareError(shnum, "invalid mutable share header for shnum %d: %s" % (shnum, e))
 
         if magic != self.MAGIC:
             msg = "%r had magic %r but we wanted %r" % (self, magic, self.MAGIC)
-            raise UnknownMutableContainerVersionError(msg)
+            raise UnknownMutableContainerVersionError(shnum, msg)
 
         self._write_enabler_nodeid = write_enabler_nodeid
         self._real_write_enabler = real_write_enabler
diff --git a/src/allmydata/storage/backends/disk/disk_backend.py b/src/allmydata/storage/backends/disk/disk_backend.py
index e3d09a53046c66cf9a033ab7ca7192c5bc891b72..5255cdfe5ffd8e448e9b070e2a03777dde6fe9c3 100644 (file)
@@ -1,13 +1,12 @@
 
-import struct, os.path
+import os.path
 
 from twisted.internet import defer
 
 from zope.interface import implements
 from allmydata.interfaces import IStorageBackend, IShareSet
 from allmydata.util import fileutil, log
-from allmydata.storage.common import si_b2a, si_a2b, NUM_RE, \
-     UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError
+from allmydata.storage.common import si_b2a, si_a2b, NUM_RE, CorruptStoredShareError
 from allmydata.storage.bucket import BucketWriter
 from allmydata.storage.backends.base import Backend, ShareSet
 from allmydata.storage.backends.disk.immutable import load_immutable_disk_share, create_immutable_disk_share
@@ -141,9 +140,7 @@ class DiskShareSet(ShareSet):
             sharefile = os.path.join(self._sharehomedir, shnumstr)
             try:
                 shares[shnum] = get_disk_share(sharefile, si, shnum)
-            except (UnknownMutableContainerVersionError,
-                    UnknownImmutableContainerVersionError,
-                    struct.error):
+            except CorruptStoredShareError:
                 corrupted.add(shnum)
 
         valid = [shares[shnum] for shnum in sorted(shares.keys())]
diff --git a/src/allmydata/storage/backends/disk/immutable.py b/src/allmydata/storage/backends/disk/immutable.py
index b7bba0db0b1f758959a43c012f235de96f84527d..056863c5b151ee431dc5ee004a4c5a2911c960b7 100644 (file)
@@ -8,7 +8,7 @@ from allmydata.interfaces import IShareForReading, IShareForWriting
 
 from allmydata.util import fileutil
 from allmydata.util.assertutil import precondition, _assert
-from allmydata.storage.common import si_b2a, UnknownImmutableContainerVersionError, \
+from allmydata.storage.common import si_b2a, CorruptStoredShareError, UnknownImmutableContainerVersionError, \
      DataTooLargeError
 
 
@@ -79,17 +79,19 @@ class ImmutableDiskShare(object):
             f = open(self._home, 'rb')
             try:
                 (version, unused, num_leases) = struct.unpack(self.HEADER, f.read(self.HEADER_SIZE))
+            except struct.error, e:
+                raise CorruptStoredShareError(shnum, "invalid immutable share header for shnum %d: %s" % (shnum, e))
             finally:
                 f.close()
             if version != 1:
                 msg = "sharefile %r had version %d but we wanted 1" % (self._home, version)
-                raise UnknownImmutableContainerVersionError(msg)
+                raise UnknownImmutableContainerVersionError(shnum, msg)
 
             filesize = os.stat(self._home).st_size
             self._data_length = filesize - self.DATA_OFFSET - (num_leases * self.LEASE_SIZE)
 
-        # TODO: raise a better exception.
-        _assert(self._data_length >= 0, data_length=self._data_length)
+        if self._data_length < 0:
+            raise CorruptStoredShareError(shnum, "calculated data length for shnum %d is %d" % (shnum, self._data_length))
 
     def __repr__(self):
         return ("<ImmutableDiskShare %s:%r at %r>"
diff --git a/src/allmydata/storage/backends/disk/mutable.py b/src/allmydata/storage/backends/disk/mutable.py
index 8eaadd2565adb785b41eb3f803b5bee72ce1dcd0..253df514704adf26c90971aff5f33ac64e30e7fa 100644 (file)
@@ -9,7 +9,7 @@ from allmydata.interfaces import IMutableShare, BadWriteEnablerError
 from allmydata.util import fileutil, idlib, log
 from allmydata.util.assertutil import precondition, _assert
 from allmydata.util.hashutil import timing_safe_compare
-from allmydata.storage.common import si_b2a, UnknownMutableContainerVersionError, \
+from allmydata.storage.common import si_b2a, CorruptStoredShareError, UnknownMutableContainerVersionError, \
      DataTooLargeError
 from allmydata.storage.backends.base import testv_compare
 from allmydata.mutable.layout import MUTABLE_MAGIC, MAX_MUTABLE_SHARE_SIZE
@@ -72,7 +72,9 @@ class MutableDiskShare(object):
                 if magic != self.MAGIC:
                     msg = "sharefile %r had magic '%r' but we wanted '%r'" % \
                           (self._home, magic, self.MAGIC)
-                    raise UnknownMutableContainerVersionError(msg)
+                    raise UnknownMutableContainerVersionError(shnum, msg)
+            except struct.error, e:
+                raise CorruptStoredShareError(shnum, "invalid mutable share header for shnum %d: %s" % (shnum, e))
             finally:
                 f.close()
         self.parent = parent # for logging
diff --git a/src/allmydata/storage/common.py b/src/allmydata/storage/common.py
index d2170426876972ee00d4f5f9ab9df0b61fe0d72d..5417da30e6d51dc89b6f222b6c0a621a6623fb2c 100644 (file)
@@ -13,7 +13,13 @@ PREFIX = re.compile("^[%s]{2}$" % (base32.z_base_32_alphabet,))
 class DataTooLargeError(Exception):
     pass
 
-class UnknownContainerVersionError(Exception):
+
+class CorruptStoredShareError(Exception):
+    def __init__(self, shnum, *rest):
+        Exception.__init__(self, shnum, *rest)
+        self.shnum = shnum
+
+class UnknownContainerVersionError(CorruptStoredShareError):
     pass
 
 class UnknownMutableContainerVersionError(UnknownContainerVersionError):