From: robk-tahoe
Date: Fri, 11 Apr 2008 00:25:44 +0000 (-0700)
Subject: stats_gatherer: reconcile helper stats gathering
X-Git-Url: https://git.rkrishnan.org/%5B/%5D%20/uri/frontends/FTP-and-SFTP.rst?a=commitdiff_plain;h=766deaa9b6462eb3ab3c30b04fa1f88c2876c38f;p=tahoe-lafs%2Ftahoe-lafs.git

stats_gatherer: reconcile helper stats gathering

I'd implemented stats gathering hooks in the helper a while back. Brian did
the same without reference to my changes. This reconciles the two, covering
all the stats from both changes and implementing them through the
stats_provider interface.

This also provides templates for all 10 helper graphs in the tahoe-stats
munin plugin.
---

diff --git a/misc/munin/tahoe-stats.py b/misc/munin/tahoe-stats.py
index 6c6a4bf6..60ba01a1 100644
--- a/misc/munin/tahoe-stats.py
+++ b/misc/munin/tahoe-stats.py
@@ -99,7 +99,7 @@ PLUGINS = {
       },
 
     'tahoe_helper_incoming_files':
-    { 'statid': 'chk_upload_helper.inc_count',
+    { 'statid': 'chk_upload_helper.incoming_count',
       'category': 'stats',
       'configheader': '\n'.join(['graph_title Tahoe Upload Helper Incoming File Count',
                                  'graph_vlabel n files',
@@ -113,7 +113,7 @@ PLUGINS = {
                                  ]),
       },
     'tahoe_helper_incoming_filesize':
-    { 'statid': 'chk_upload_helper.inc_size',
+    { 'statid': 'chk_upload_helper.incoming_size',
       'category': 'stats',
       'configheader': '\n'.join(['graph_title Tahoe Upload Helper Incoming File Size',
                                  'graph_vlabel bytes',
@@ -127,7 +127,7 @@ PLUGINS = {
                                  ]),
       },
     'tahoe_helper_incoming_files_old':
-    { 'statid': 'chk_upload_helper.inc_size_old',
+    { 'statid': 'chk_upload_helper.incoming_size_old',
       'category': 'stats',
       'configheader': '\n'.join(['graph_title Tahoe Upload Helper Incoming Old Files',
                                  'graph_vlabel bytes',
@@ -142,7 +142,7 @@ PLUGINS = {
       },
 
     'tahoe_helper_encoding_files':
-    { 'statid': 'chk_upload_helper.enc_count',
+    { 'statid': 'chk_upload_helper.encoding_count',
       'category': 'stats',
       'configheader': '\n'.join(['graph_title Tahoe Upload Helper Encoding File Count',
                                  'graph_vlabel n files',
@@ -156,7 +156,7 @@ PLUGINS = {
                                  ]),
       },
     'tahoe_helper_encoding_filesize':
-    { 'statid': 'chk_upload_helper.enc_size',
+    { 'statid': 'chk_upload_helper.encoding_size',
       'category': 'stats',
       'configheader': '\n'.join(['graph_title Tahoe Upload Helper Encoding File Size',
                                  'graph_vlabel bytes',
@@ -170,7 +170,7 @@ PLUGINS = {
                                  ]),
       },
     'tahoe_helper_encoding_files_old':
-    { 'statid': 'chk_upload_helper.enc_size_old',
+    { 'statid': 'chk_upload_helper.encoding_size_old',
       'category': 'stats',
       'configheader': '\n'.join(['graph_title Tahoe Upload Helper Encoding Old Files',
                                  'graph_vlabel bytes',
@@ -184,6 +184,92 @@ PLUGINS = {
                                  ]),
       },
 
+    'tahoe_helper_active_uploads':
+    { 'statid': 'chk_upload_helper.active_uploads',
+      'category': 'stats',
+      'configheader': '\n'.join(['graph_title Tahoe Upload Helper Active Files',
+                                 'graph_vlabel n files',
+                                 'graph_category tahoe_helper',
+                                 'graph_info This graph shows number of files actively being processed by the helper',
+                                 ]),
+      'graph_config': '\n'.join(['%(name)s.label %(name)s',
+                                 '%(name)s.draw LINE1',
+                                 ]),
+      'graph_render': '\n'.join(['%(name)s.value %(value)s',
+                                 ]),
+      },
+
+    'tahoe_helper_upload_requests':
+    { 'statid': 'chk_upload_helper.upload_requests',
+      'category': 'stats',
+      'configheader': '\n'.join(['graph_title Tahoe Upload Helper Upload Requests',
+                                 'graph_vlabel requests',
+                                 'graph_category tahoe_helper',
+                                 'graph_info This graph shows the number of upload requests arriving at the helper',
+                                 ]),
+      'graph_config': '\n'.join(['%(name)s.label %(name)s',
+                                 '%(name)s.draw LINE1',
+                                 ]),
+      'graph_render': '\n'.join(['%(name)s.value %(value)s',
+                                 ]),
+      },
+    'tahoe_helper_upload_already_present':
+    { 'statid': 'chk_upload_helper.upload_already_present',
+      'category': 'stats',
+      'configheader': '\n'.join(['graph_title Tahoe Upload Helper Uploads Already Present',
+                                 'graph_vlabel requests',
+                                 'graph_category tahoe_helper',
+                                 'graph_info This graph shows the number of uploads whose files are already present in the grid',
+                                 ]),
+      'graph_config': '\n'.join(['%(name)s.label %(name)s',
+                                 '%(name)s.draw LINE1',
+                                 ]),
+      'graph_render': '\n'.join(['%(name)s.value %(value)s',
+                                 ]),
+      },
+    'tahoe_helper_upload_need_upload':
+    { 'statid': 'chk_upload_helper.upload_need_upload',
+      'category': 'stats',
+      'configheader': '\n'.join(['graph_title Tahoe Upload Helper Uploads Needing Upload',
+                                 'graph_vlabel requests',
+                                 'graph_category tahoe_helper',
+                                 'graph_info This graph shows the number of uploads whose files are not already present in the grid',
+                                 ]),
+      'graph_config': '\n'.join(['%(name)s.label %(name)s',
+                                 '%(name)s.draw LINE1',
+                                 ]),
+      'graph_render': '\n'.join(['%(name)s.value %(value)s',
+                                 ]),
+      },
+    'tahoe_helper_encoded_bytes':
+    { 'statid': 'chk_upload_helper.encoded_bytes',
+      'category': 'stats',
+      'configheader': '\n'.join(['graph_title Tahoe Upload Helper Encoded Bytes',
+                                 'graph_vlabel bytes',
+                                 'graph_category tahoe_helper',
+                                 'graph_info This graph shows the number of bytes encoded by the helper',
+                                 ]),
+      'graph_config': '\n'.join(['%(name)s.label %(name)s',
+                                 '%(name)s.draw LINE1',
+                                 ]),
+      'graph_render': '\n'.join(['%(name)s.value %(value)s',
+                                 ]),
+      },
+    'tahoe_helper_fetched_bytes':
+    { 'statid': 'chk_upload_helper.fetched_bytes',
+      'category': 'stats',
+      'configheader': '\n'.join(['graph_title Tahoe Upload Helper Fetched Bytes',
+                                 'graph_vlabel bytes',
+                                 'graph_category tahoe_helper',
+                                 'graph_info This graph shows the number of bytes fetched by the helper',
+                                 ]),
+      'graph_config': '\n'.join(['%(name)s.label %(name)s',
+                                 '%(name)s.draw LINE1',
+                                 ]),
+      'graph_render': '\n'.join(['%(name)s.value %(value)s',
+                                 ]),
+      },
+
 }
 
 def smash_name(name):
diff --git a/src/allmydata/offloaded.py b/src/allmydata/offloaded.py
index 751c94b1..543fa0a3 100644
--- a/src/allmydata/offloaded.py
+++ b/src/allmydata/offloaded.py
@@ -377,7 +377,9 @@ class CHKCiphertextFetcher(AskUntilSuccessMixin):
             self._f.write(data)
             self._have += len(data)
             self._ciphertext_fetched += len(data)
-            self._upload_helper._helper._stats["CHK_fetched_bytes"] += len(data)
+            stats_provider = self._upload_helper._helper.stats_provider
+            if stats_provider:
+                stats_provider.count("chk_upload_helper.fetched_bytes", len(data))
             return False # not done
         d.addCallback(_got_data)
         return d
@@ -476,39 +478,10 @@ class Helper(Referenceable, service.MultiService):
         self._active_uploads = {}
         if stats_provider:
             stats_provider.register_producer(self)
-        self._stats = {"CHK_upload_requests": 0,
-                       "CHK_upload_already_present": 0,
-                       "CHK_upload_need_upload": 0,
-                       "CHK_fetched_bytes": 0,
-                       "CHK_encoded_bytes": 0,
-                       }
         service.MultiService.__init__(self)
 
     def setServiceParent(self, parent):
         service.MultiService.setServiceParent(self, parent)
-        stats = parent.stats_provider
-        if stats:
-            stats.register_producer(self)
-
-    def get_stats(self):
-        chk_incoming_files, chk_incoming_size = 0,0
-        chk_encoding_files, chk_encoding_size = 0,0
-        for fn in os.listdir(self._chk_incoming):
-            size = os.stat(os.path.join(self._chk_incoming, fn))[stat.ST_SIZE]
-            chk_incoming_files += 1
-            chk_incoming_size += size
-        for fn in os.listdir(self._chk_encoding):
-            size = os.stat(os.path.join(self._chk_encoding, fn))[stat.ST_SIZE]
-            chk_encoding_files += 1
-            chk_encoding_size += size
-        stats = {"CHK_active_uploads": len(self._active_uploads),
-                 "CHK_incoming_files": chk_incoming_files,
-                 "CHK_incoming_size": chk_incoming_size,
-                 "CHK_encoding_files": chk_encoding_files,
-                 "CHK_encoding_size": chk_encoding_size,
-                 }
-        stats.update(self._stats)
-        return {"helper": stats}
 
     def log(self, *args, **kwargs):
         if 'facility' not in kwargs:
@@ -538,16 +511,18 @@ class Helper(Referenceable, service.MultiService):
                 enc_size += size
                 if now - mtime > OLD:
                     enc_size_old += size
-        return { 'chk_upload_helper.inc_count': inc_count,
-                 'chk_upload_helper.inc_size': inc_size,
-                 'chk_upload_helper.inc_size_old': inc_size_old,
-                 'chk_upload_helper.enc_count': enc_count,
-                 'chk_upload_helper.enc_size': enc_size,
-                 'chk_upload_helper.enc_size_old': enc_size_old,
+        return { 'chk_upload_helper.active_uploads': len(self._active_uploads),
+                 'chk_upload_helper.incoming_count': inc_count,
+                 'chk_upload_helper.incoming_size': inc_size,
+                 'chk_upload_helper.incoming_size_old': inc_size_old,
+                 'chk_upload_helper.encoding_count': enc_count,
+                 'chk_upload_helper.encoding_size': enc_size,
+                 'chk_upload_helper.encoding_size_old': enc_size_old,
                  }
 
     def remote_upload_chk(self, storage_index):
-        self._stats["CHK_upload_requests"] += 1
+        if self.stats_provider:
+            self.stats_provider.count("chk_upload_helper.upload_requests")
         r = upload.UploadResults()
         started = time.time()
         si_s = storage.si_b2a(storage_index)
@@ -565,11 +540,13 @@ class Helper(Referenceable, service.MultiService):
             r.timings['existence_check'] = elapsed
             if already_present:
                 # the necessary results are placed in the UploadResults
-                self._stats["CHK_upload_already_present"] += 1
+                if self.stats_provider:
+                    self.stats_provider.count("chk_upload_helper.upload_already_present")
                 self.log("file already found in grid", parent=lp)
                 return (r, None)
 
-            self._stats["CHK_upload_need_upload"] += 1
+            if self.stats_provider:
+                self.stats_provider.count("chk_upload_helper.upload_need_upload")
             # the file is not present in the grid, by which we mean there are
             # less than 'N' shares available.
             self.log("unable to find file in the grid", parent=lp,
@@ -620,5 +597,6 @@ class Helper(Referenceable, service.MultiService):
         return d
 
     def upload_finished(self, storage_index, size):
-        self._stats["CHK_encoded_bytes"] += size
+        if self.stats_provider:
+            self.stats_provider.count("chk_upload_helper.encoded_bytes", size)
         del self._active_uploads[storage_index]
diff --git a/src/allmydata/stats.py b/src/allmydata/stats.py
index f5b6db05..96fea7a6 100644
--- a/src/allmydata/stats.py
+++ b/src/allmydata/stats.py
@@ -96,7 +96,7 @@ class StatsProvider(foolscap.Referenceable, service.MultiService):
         d.addCallback(connect)
         service.MultiService.startService(self)
 
-    def count(self, name, delta):
+    def count(self, name, delta=1):
         val = self.counters.setdefault(name, 0)
         self.counters[name] = val + delta
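
The counter/producer split that this change settles on can be summarized with a small, self-contained sketch. SimpleStatsProvider and FakeHelper below are hypothetical stand-ins invented for illustration (the real StatsProvider in src/allmydata/stats.py is a foolscap Referenceable and a twisted MultiService, and the real Helper does much more); only the count(name, delta=1), register_producer(), and get_stats() shapes are taken from the patch above.

# Minimal sketch of the stats_provider pattern the helper uses after this
# change.  SimpleStatsProvider and FakeHelper are hypothetical stand-ins; the
# real classes live in src/allmydata/stats.py and src/allmydata/offloaded.py
# and carry foolscap/twisted machinery not shown here.

class SimpleStatsProvider:
    def __init__(self):
        self.counters = {}      # monotonically increasing event counts
        self.producers = []     # objects polled for instantaneous gauges

    def count(self, name, delta=1):
        # same shape as StatsProvider.count() after this patch: delta defaults to 1
        val = self.counters.setdefault(name, 0)
        self.counters[name] = val + delta

    def register_producer(self, producer):
        self.producers.append(producer)

    def get_stats(self):
        stats = {}
        for p in self.producers:
            stats.update(p.get_stats())   # gauges, recomputed on every poll
        stats.update(self.counters)       # counters, accumulated via count()
        return stats


class FakeHelper:
    """Hypothetical producer mimicking the Helper's split between counters
    (bumped as events happen) and gauges (recomputed inside get_stats())."""
    def __init__(self, stats_provider):
        self.stats_provider = stats_provider
        self._active_uploads = {}
        if stats_provider:
            stats_provider.register_producer(self)

    def remote_upload_chk(self, storage_index):
        if self.stats_provider:
            self.stats_provider.count("chk_upload_helper.upload_requests")
        self._active_uploads[storage_index] = True

    def get_stats(self):
        # instantaneous value, analogous to the incoming/encoding dir scans
        return {"chk_upload_helper.active_uploads": len(self._active_uploads)}


if __name__ == "__main__":
    provider = SimpleStatsProvider()
    helper = FakeHelper(provider)
    helper.remote_upload_chk("si_1")
    helper.remote_upload_chk("si_2")
    print(provider.get_stats())
    # {'chk_upload_helper.active_uploads': 2,
    #  'chk_upload_helper.upload_requests': 2}

Counters suit monotonically increasing totals (upload_requests, encoded_bytes, fetched_bytes), while get_stats() on a registered producer supplies gauges (active_uploads, incoming/encoding directory contents) each time the provider is polled.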
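
The munin templates added to tahoe-stats.py are plain format strings keyed by statid. The following sketch shows how one such entry could expand into munin config and fetch output; render_config(), render_values(), and the hard-coded stats dict are hypothetical and stand in for the actual plugin, which also derives node names and reads live stats from the node.

# Hedged sketch of how a PLUGINS entry like the ones added above expands into
# munin output.  The plugin dict is copied from the diff; render_config() and
# render_values() are hypothetical helpers, and the stats dict is hard-coded
# instead of being fetched from a running node.

plugin = {
    'statid': 'chk_upload_helper.active_uploads',
    'category': 'stats',
    'configheader': '\n'.join(['graph_title Tahoe Upload Helper Active Files',
                               'graph_vlabel n files',
                               'graph_category tahoe_helper',
                               'graph_info This graph shows number of files actively being processed by the helper',
                               ]),
    'graph_config': '\n'.join(['%(name)s.label %(name)s',
                               '%(name)s.draw LINE1',
                               ]),
    'graph_render': '\n'.join(['%(name)s.value %(value)s',
                               ]),
}

def render_config(plugin, node_name):
    # "config" mode output: global graph settings plus one data-series stanza
    lines = [plugin['configheader']]
    lines.append(plugin['graph_config'] % {'name': node_name})
    return '\n'.join(lines)

def render_values(plugin, node_name, stats):
    # normal fetch output: one value line, looked up by statid
    value = stats[plugin['statid']]
    return plugin['graph_render'] % {'name': node_name, 'value': value}

if __name__ == '__main__':
    stats = {'chk_upload_helper.active_uploads': 3}   # stand-in for node stats
    print(render_config(plugin, 'helper_1'))
    print(render_values(plugin, 'helper_1', stats))
    # helper_1.value 3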