From c6b65f32efb853eaf5f15d312e87b2d1b88d1c84 Mon Sep 17 00:00:00 2001
From: Daira Hopwood
Date: Sat, 18 May 2013 02:34:20 +0100
Subject: [PATCH] Refactor mock_cloud methods to simplify and make them more
 like CommonContainerMixin. Also make sure that stripping of data arguments
 for logging is correct for all containers.

Signed-off-by: Daira Hopwood
---
 .../storage/backends/cloud/cloud_common.py    | 20 ++++++++--
 .../storage/backends/cloud/mock_cloud.py      | 37 +++++++++----------
 .../storage/backends/cloud/s3/s3_container.py |  4 ++
 src/allmydata/test/test_storage.py            |  8 ++--
 4 files changed, 42 insertions(+), 27 deletions(-)

diff --git a/src/allmydata/storage/backends/cloud/cloud_common.py b/src/allmydata/storage/backends/cloud/cloud_common.py
index 7e41aaa7..fda61e66 100644
--- a/src/allmydata/storage/backends/cloud/cloud_common.py
+++ b/src/allmydata/storage/backends/cloud/cloud_common.py
@@ -350,16 +350,26 @@ class ContainerRetryMixin:
     """
 
     def _react_to_error(self, response_code):
-        # The default policy is to retry on 5xx errors.
+        """
+        The default policy is to retry on 5xx errors.
+        """
         return response_code >= 500 and response_code < 600
 
+    def _strip_data(self, args):
+        """
+        By default retain only one argument (object_name) for logging.
+        Subclasses should override this if more than one argument is safe to log
+        (we want to avoid logging data).
+        """
+        return args[:1]
+
     def _do_request(self, description, operation, *args, **kwargs):
         d = defer.maybeDeferred(operation, *args, **kwargs)
         def _retry(f):
             d2 = self._handle_error(f, 1, None, description, operation, *args, **kwargs)
             def _trigger_incident(res):
                 log.msg(format="error(s) on cloud container operation: %(description)s %(arguments)s %(kwargs)s %(res)s",
-                        arguments=args[:2], kwargs=kwargs, description=description, res=res,
+                        arguments=self._strip_data(args), kwargs=kwargs, description=description, res=res,
                         level=log.WEIRD)
                 return res
             d2.addBoth(_trigger_incident)
@@ -375,7 +385,7 @@ class ContainerRetryMixin:
             if len(fargs) > 2 and fargs[2] and 'signaturedoesnotmatch' in fargs[2].lower():
                 fargs = fargs[:2] + ("SignatureDoesNotMatch response redacted",) + fargs[3:]
 
-            args_without_data = args[:2]
+            args_without_data = self._strip_data(args)
             msg = "try %d failed: %s %s %s" % (trynum, description, args_without_data, kwargs)
             err = CloudError(msg, *fargs)
 
@@ -711,6 +721,8 @@ class CommonContainerMixin(HTTPClientMixin, ContainerRetryMixin):
 
     def __init__(self, container_name, override_reactor=None):
         self._container_name = container_name
+
+        # Maybe this should be in HTTPClientMixin?
         self._reactor = override_reactor or reactor
         pool = HTTPConnectionPool(self._reactor)
         pool.maxPersistentPerHost = 20
@@ -727,6 +739,8 @@ class CommonContainerMixin(HTTPClientMixin, ContainerRetryMixin):
         return "%s/%s/%s" % (public_storage_url,
                              urllib.quote(self._container_name, safe=''),
                              urllib.quote(object_name))
 
+    # Consider moving these to ContainerRetryMixin.
+
     def create(self):
         return self._do_request('create container', self._create)
diff --git a/src/allmydata/storage/backends/cloud/mock_cloud.py b/src/allmydata/storage/backends/cloud/mock_cloud.py
index 744441f9..e6c333c7 100644
--- a/src/allmydata/storage/backends/cloud/mock_cloud.py
+++ b/src/allmydata/storage/backends/cloud/mock_cloud.py
@@ -7,6 +7,7 @@ from allmydata.util.deferredutil import async_iterate
 
 from zope.interface import implements
 
+from allmydata.util.assertutil import _assert
 from allmydata.storage.backends.cloud.cloud_common import IContainer, \
      CloudServiceError, ContainerItem, ContainerListing, \
      ContainerRetryMixin, ContainerListMixin
@@ -61,7 +62,7 @@ class MockContainer(ContainerRetryMixin, ContainerListMixin):
                 sharefile = os.path.join(sidir, shnumstr)
                 yield (sharefile, "%s/%s" % (sikey, shnumstr))
 
-    def _list_some_objects(self, ign, prefix='', marker=None, max_keys=None):
+    def _list_some_objects(self, prefix='', marker=None, max_keys=None):
         if max_keys is None:
             max_keys = MAX_KEYS
         contents = []
@@ -93,53 +94,49 @@ class MockContainer(ContainerRetryMixin, ContainerListMixin):
             raise self.ServiceError("", 404, "not found")
         return sharefile
 
-    def _put_object(self, ign, object_name, data, content_type, metadata):
-        assert content_type is None, content_type
-        assert metadata == {}, metadata
+    def _put_object(self, object_name, data, content_type, metadata):
+        _assert(content_type == 'application/octet-stream', content_type=content_type)
+        _assert(metadata == {}, metadata=metadata)
         sharefile = self._get_path(object_name)
         fileutil.make_dirs(os.path.dirname(sharefile))
         fileutil.write(sharefile, data)
         self._store_count += 1
         return defer.succeed(None)
 
-    def _get_object(self, ign, object_name):
+    def _get_object(self, object_name):
         self._load_count += 1
         data = fileutil.read(self._get_path(object_name, must_exist=True))
         return defer.succeed(data)
 
-    def _head_object(self, ign, object_name):
-        return defer.execute(self._not_implemented)
+    def _head_object(self, object_name):
+        return defer.execute(_not_implemented)
 
-    def _delete_object(self, ign, object_name):
+    def _delete_object(self, object_name):
         fileutil.remove(self._get_path(object_name, must_exist=True))
         return defer.succeed(None)
 
-    def _not_implemented(self):
-        raise NotImplementedError
-
     # methods that use error handling from ContainerRetryMixin
 
     def create(self):
-        return self._do_request('create bucket', self._create, self.container_name)
+        return self._do_request('create bucket', self._create)
 
     def delete(self):
-        return self._do_request('delete bucket', self._delete, self.container_name)
+        return self._do_request('delete bucket', self._delete)
 
     def list_some_objects(self, **kwargs):
-        return self._do_request('list objects', self._list_some_objects, self.container_name, **kwargs)
+        return self._do_request('list objects', self._list_some_objects, **kwargs)
 
-    def put_object(self, object_name, data, content_type=None, metadata={}):
-        return self._do_request('PUT object', self._put_object, self.container_name, object_name,
-                                data, content_type, metadata)
+    def put_object(self, object_name, data, content_type='application/octet-stream', metadata={}):
+        return self._do_request('PUT object', self._put_object, object_name, data, content_type, metadata)
 
     def get_object(self, object_name):
-        return self._do_request('GET object', self._get_object, self.container_name, object_name)
+        return self._do_request('GET object', self._get_object, object_name)
 
     def head_object(self, object_name):
-        return self._do_request('HEAD object', self._head_object, self.container_name, object_name)
+        return self._do_request('HEAD object', self._head_object, object_name)
 
     def delete_object(self, object_name):
-        return self._do_request('DELETE object', self._delete_object, self.container_name, object_name)
+        return self._do_request('DELETE object', self._delete_object, object_name)
 
     def reset_load_store_counts(self):
         self._load_count = 0
diff --git a/src/allmydata/storage/backends/cloud/s3/s3_container.py b/src/allmydata/storage/backends/cloud/s3/s3_container.py
index 3391300a..84b9edb9 100644
--- a/src/allmydata/storage/backends/cloud/s3/s3_container.py
+++ b/src/allmydata/storage/backends/cloud/s3/s3_container.py
@@ -55,6 +55,10 @@ class S3Container(ContainerRetryMixin, ContainerListMixin):
     def __repr__(self):
         return ("<%s %r>" % (self.__class__.__name__, self.container_name,))
 
+    def _strip_data(self, args):
+        # Retain up to two arguments (container_name and object_name) for logging.
+        return args[:2]
+
     def create(self):
         return self._do_request('create bucket', self.client.create_bucket, self.container_name)
 
diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index e0e6c0fd..0f757e3c 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -374,9 +374,9 @@ class CloudCommon(unittest.TestCase, ShouldFailMixin, WorkdirMixin):
         fileutil.make_dirs(basedir)
 
         class BadlyTruncatingMockContainer(MockContainer):
-            def _list_some_objects(self, container_name, prefix='', marker=None):
+            def _list_some_objects(self, prefix='', marker=None):
                 contents = [ContainerItem("", None, "", 0, None, None)]
-                return defer.succeed(ContainerListing(container_name, "", "", 0, "true", contents))
+                return defer.succeed(ContainerListing(self.container_name, "", "", 0, "true", contents))
 
         s = {"level": 0}
         def call_log_msg(*args, **kwargs):
@@ -2792,12 +2792,12 @@ class ServerWithMockCloudBackend(WithMockCloudBackend, ServerTest, unittest.TestCase):
 
         t = {'count': 0}
         old_put_object = MockContainer._put_object
-        def call_put_object(self, ign, object_name, data, content_type=None, metadata={}):
+        def call_put_object(self, object_name, data, content_type=None, metadata={}):
             t['count'] += 1
             if t['count'] <= failure_count:
                 return defer.fail(CloudServiceError("XML", 500, "Internal error", "response"))
             else:
-                return old_put_object(self, ign, object_name, data, content_type=content_type, metadata=metadata)
+                return old_put_object(self, object_name, data, content_type=content_type, metadata=metadata)
         self.patch(MockContainer, '_put_object', call_put_object)
 
         def call_log_msg(*args, **kwargs):
-- 
2.45.2