From: David Stainton
Date: Tue, 18 Aug 2015 14:33:29 +0000 (+0000)
Subject: Add some print statements.
X-Git-Url: https://git.rkrishnan.org/simplejson/rgr-080307.php?a=commitdiff_plain;h=195d4befc1e452a303ae19ff0aaed2096fe07127;p=tahoe-lafs%2Ftahoe-lafs.git

Add some print statements.
---

diff --git a/src/allmydata/frontends/magic_folder.py b/src/allmydata/frontends/magic_folder.py
index 4a4db449..92734263 100644
--- a/src/allmydata/frontends/magic_folder.py
+++ b/src/allmydata/frontends/magic_folder.py
@@ -76,8 +76,14 @@ class MagicFolder(service.MultiService):
         self.downloader.start_scanning()
 
     def finish(self):
+        print "finish"
         d = self.uploader.stop()
+        def _print(f):
+            print f
+            return f
+        d.addErrback(_print)
         d.addBoth(lambda ign: self.downloader.stop())
+        d.addErrback(_print)
         return d
 
     def remove_service(self):
@@ -196,12 +202,17 @@ class Uploader(QueueMixin):
         return d
 
     def stop(self):
+        print "stop: _deque = %r, _pending = %r" % (self._deque, self._pending)
         self._notifier.stopReading()
         self._count('dirs_monitored', -1)
         if hasattr(self._notifier, 'wait_until_stopped'):
             d = self._notifier.wait_until_stopped()
         else:
             d = defer.succeed(None)
+        def _after(res):
+            print "stop _after: res = %r, _deque = %r, _pending = %r" % (res, self._deque, self._pending)
+            return res
+        d.addBoth(_after)
         return d
 
     def start_scanning(self):
@@ -404,15 +415,22 @@ class Downloader(QueueMixin):
         self._turn_delay = 3 # delay between remote scans
         self._download_scan_batch = {} # path -> [(filenode, metadata)]
+        print "Downloader init"
 
     def start_scanning(self):
+        print "downloader start_scanning"
        self._scan_remote_collective()
         self._turn_deque()
 
     def stop(self):
+        print "downloader stop"
         self._stopped = True
         d = defer.succeed(None)
         d.addCallback(lambda ign: self._lazy_tail)
+        def _print(res):
+            print "downloader stop _after: res = %r, _deque = %r, _pending = %r" % (res, self._deque, self._pending)
+            return res
+        d.addBoth(_print)
         return d
 
     def _should_download(self, relpath_u, remote_version):
@@ -421,6 +439,7 @@ class Downloader(QueueMixin):
         We check the remote metadata version against our magic-folder db
         version number; latest version wins.
         """
+        print "_should_download"
         v = self._db.get_local_file_version(relpath_u)
         print "_should_download path %s local db version %s, remote dmd version %s" % (relpath_u, v, remote_version)
         return (v is None or v < remote_version)
@@ -466,24 +485,34 @@
         return collective_dirmap_d
 
     def _scan_remote(self, nickname, dirnode):
+        print "_scan_remote START: nickname %s dirnode %s" % (nickname, dirnode)
         listing_d = dirnode.list()
         def scan_listing(listing_map):
             for name in listing_map.keys():
+                print "name ", name
                 file_node, metadata = listing_map[name]
                 local_version = self._get_local_latest(name) # XXX we might need to convert first?
                 if local_version is not None:
                     if local_version >= metadata['version']:
                         return None
                 else:
+                    print "ALL KEYS %s" % (self._download_scan_batch.keys(),)
                     if self._download_scan_batch.has_key(name):
+                        print "HAS KEY - %s %s" % (file_node, metadata)
                         self._download_scan_batch[name] += [(file_node, metadata)]
                     else:
+                        print "NOT HAS KEY"
                         self._download_scan_batch[name] = [(file_node, metadata)]
+
+            print "download scan batch before filtering", repr(self._download_scan_batch)
 
         listing_d.addCallback(scan_listing)
+        print "_scan_remote END"
         return listing_d
 
     def _scan_remote_collective(self):
+        print "downloader _scan_remote_collective"
         self._download_scan_batch = {} # XXX
+
         if self._collective_dirnode is None: return
         collective_dirmap_d = self._collective_dirnode.list()
@@ -500,7 +529,12 @@
             return d
         collective_dirmap_d.addCallback(scan_collective)
         collective_dirmap_d.addCallback(self._filter_scan_batch)
+        def _print(f):
+            print f
+            return f
+        collective_dirmap_d.addErrback(_print)
         collective_dirmap_d.addCallback(self._add_batch_to_download_queue)
+        print "end of _scan_remote_collective"
         return collective_dirmap_d
 
     def _add_batch_to_download_queue(self, result):
@@ -508,21 +542,30 @@
         self._pending.update(map(lambda x: x[0], result))
 
     def _filter_scan_batch(self, result):
+        print "FILTER START len %s" % (len(self._download_scan_batch),)
         extension = [] # consider whether this should be a dict
         for name in self._download_scan_batch.keys():
             if name in self._pending:
+                print "downloader: %s found in pending; skipping" % (name,)
                 continue
             file_node, metadata = max(self._download_scan_batch[name], key=lambda x: x[1]['version'])
+            print "file_node %s metadata %s" % (file_node, metadata)
             if self._should_download(name, metadata['version']):
+                print "should download"
                 extension += [(name, file_node, metadata)]
+            else:
+                print "should not download"
+        print "FILTER END"
         return extension
 
     def _when_queue_is_empty(self):
+        print "_when_queue_is_empty"
         d = task.deferLater(reactor, self._turn_delay, self._scan_remote_collective)
         d.addCallback(lambda ign: self._turn_deque())
         return d
 
     def _process(self, item):
+        print "_process"
         (name, file_node, metadata) = item
         d = file_node.download_best_version()
         def succeeded(res):
@@ -543,11 +586,14 @@
             self._count('objects_download_failed')
             return f
         def remove_from_pending(ign):
+            print "REMOVE FROM PENDING _pending = %r, name = %r" % (self._pending, name)
             self._pending.remove(name)
+            print "REMOVE FROM PENDING _after: _pending = %r" % (self._pending,)
         d.addCallbacks(succeeded, failed)
         d.addBoth(self._do_callback)
         d.addCallback(remove_from_pending)
         return d
 
     def _write_downloaded_file(self, name, file_contents):
+        print "_write_downloaded_file"
         fileutil.write(name, file_contents)
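
Note on the pattern used above: most of these diagnostics attach a small passthrough
function to a Deferred with addBoth()/addErrback(); the function prints the result
(or Failure) and returns it unchanged, so the rest of the callback chain behaves
exactly as before. A minimal standalone sketch of that idea, for illustration only
(the trace() helper below is hypothetical and is not part of this patch):

    # Illustration only -- not part of the patch. A sketch of the passthrough
    # tracing pattern used above, assuming Python 2 and Twisted as in
    # magic_folder.py.
    from twisted.internet import defer

    def trace(label, d):
        def _print(res):
            # print the intermediate result (or Failure) for debugging
            print "%s: %r" % (label, res)
            # return it unchanged so later callbacks/errbacks still see it
            return res
        d.addBoth(_print)
        return d

    if __name__ == '__main__':
        d = defer.succeed("some result")
        trace("example", d)   # prints: example: 'some result'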