WIP: Refactoring to get db fields in one query.
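The hunks below replace the separate per-field lookups (check_file_db_exists, get_local_file_version, get_last_downloaded_uri) with a single get_db_entry(relpath_u) call that returns one record, or None when the path has no row. As a minimal sketch of the shape that call is assumed to have -- the table and column names here are illustrative, not the actual magicfolderdb schema:

    from collections import namedtuple

    # Record carrying the fields the callers in this diff rely on.
    PathEntry = namedtuple('PathEntry',
                           'version last_uploaded_uri last_downloaded_uri')

    def get_db_entry(cursor, relpath_u):
        """Return a PathEntry for relpath_u, or None if it has no row."""
        cursor.execute("SELECT version, last_uploaded_uri, last_downloaded_uri"
                       " FROM local_files WHERE path=?", (relpath_u,))
        row = cursor.fetchone()
        return None if row is None else PathEntry(*row)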
diff --git a/src/allmydata/frontends/magic_folder.py b/src/allmydata/frontends/magic_folder.py
index 0a5211aefe30552cb2a97eecf214b3e09e111438..93463e7aa958994b5abc5f07476a68fe02b89870 100644
--- a/src/allmydata/frontends/magic_folder.py
+++ b/src/allmydata/frontends/magic_folder.py
@@ -46,11 +46,13 @@ class MagicFolder(service.MultiService):
     name = 'magic-folder'
 
     def __init__(self, client, upload_dircap, collective_dircap, local_path_u, dbfile,
-                 pending_delay=1.0, clock=reactor):
+                 pending_delay=1.0, clock=None):
         precondition_abspath(local_path_u)
 
         service.MultiService.__init__(self)
 
+        immediate = clock is not None
+        clock = clock or reactor
         db = magicfolderdb.get_magicfolderdb(dbfile, create_version=(magicfolderdb.SCHEMA_v1, 1))
         if db is None:
             return Failure(Exception('ERROR: Unable to load magic folder db.'))
@@ -62,8 +64,9 @@ class MagicFolder(service.MultiService):
         upload_dirnode = self._client.create_node_from_uri(upload_dircap)
         collective_dirnode = self._client.create_node_from_uri(collective_dircap)
 
-        self.uploader = Uploader(client, local_path_u, db, upload_dirnode, pending_delay, clock)
-        self.downloader = Downloader(client, local_path_u, db, collective_dirnode, upload_dirnode.get_readonly_uri(), clock)
+        self.uploader = Uploader(client, local_path_u, db, upload_dirnode, pending_delay, clock, immediate)
+        self.downloader = Downloader(client, local_path_u, db, collective_dirnode,
+                                     upload_dirnode.get_readonly_uri(), clock, self.uploader.is_pending)
 
     def startService(self):
         # TODO: why is this being called more than once?
@@ -163,10 +166,12 @@ class QueueMixin(HookMixin):
 
 
 class Uploader(QueueMixin):
-    def __init__(self, client, local_path_u, db, upload_dirnode, pending_delay, clock):
+    def __init__(self, client, local_path_u, db, upload_dirnode, pending_delay, clock,
+                 immediate=False):
         QueueMixin.__init__(self, client, local_path_u, db, 'uploader', clock)
 
         self.is_ready = False
+        self._immediate = immediate
 
         if not IDirectoryNode.providedBy(upload_dirnode):
             raise AssertionError("The URI in '%s' does not refer to a directory."
@@ -261,6 +266,9 @@ class Uploader(QueueMixin):
 
         return d
 
+    def is_pending(self, relpath_u):
+        return relpath_u in self._pending
+
     def _notify(self, opaque, path, events_mask):
         self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(self._inotify.humanReadableMask(events_mask))))
         relpath_u = self._get_relpath(path)
@@ -288,7 +296,10 @@ class Uploader(QueueMixin):
         self._pending.add(relpath_u)
         self._count('objects_queued')
         if self.is_ready:
-            self._clock.callLater(0, self._turn_deque)
+            if self._immediate:  # for tests
+                self._turn_deque()
+            else:
+                self._clock.callLater(0, self._turn_deque)
 
     def _when_queue_is_empty(self):
         return defer.succeed(None)
@@ -318,17 +329,15 @@ class Uploader(QueueMixin):
                 # FIXME merge this with the 'isfile' case.
                 self._log("notified object %s disappeared (this is normal)" % quote_filepath(fp))
                 self._count('objects_disappeared')
-                if not self._db.check_file_db_exists(relpath_u):
+
+                db_entry = self._db.get_db_entry(relpath_u)
+                if db_entry is None:
                     return None
 
-                last_downloaded_timestamp = now
-                last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
+                last_downloaded_timestamp = now  # is this correct?
 
-                current_version = self._db.get_local_file_version(relpath_u)
-                if current_version is None:
-                    new_version = 0
-                elif self._db.is_new_file(pathinfo, relpath_u):
-                    new_version = current_version + 1
+                if self._db.is_new_file(pathinfo, relpath_u):
+                    new_version = db_entry.version + 1
                 else:
                     self._log("Not uploading %r" % (relpath_u,))
                     self._count('objects_not_uploaded')
@@ -337,8 +346,8 @@ class Uploader(QueueMixin):
                 metadata = { 'version': new_version,
                              'deleted': True,
                              'last_downloaded_timestamp': last_downloaded_timestamp }
-                if last_downloaded_uri is not None:
-                    metadata['last_downloaded_uri'] = last_downloaded_uri
+                if db_entry.last_downloaded_uri is not None:
+                    metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
 
                 empty_uploadable = Data("", self._client.convergence)
                 d2 = self._upload_dirnode.add_file(encoded_path_u, empty_uploadable,
@@ -346,8 +355,10 @@ class Uploader(QueueMixin):
 
                 def _add_db_entry(filenode):
                     filecap = filenode.get_uri()
+                    last_downloaded_uri = metadata.get('last_downloaded_uri', None)
                     self._db.did_upload_version(relpath_u, new_version, filecap,
-                                                last_downloaded_uri, last_downloaded_timestamp, pathinfo)
+                                                last_downloaded_uri, last_downloaded_timestamp,
+                                                pathinfo)
                     self._count('files_uploaded')
                 d2.addCallback(_add_db_entry)
                 return d2
@@ -372,14 +383,14 @@ class Uploader(QueueMixin):
                 upload_d.addCallback(lambda ign: self._scan(relpath_u))
                 return upload_d
             elif pathinfo.isfile:
-                last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
+                db_entry = self._db.get_db_entry(relpath_u)
+
                 last_downloaded_timestamp = now
 
-                current_version = self._db.get_local_file_version(relpath_u)
-                if current_version is None:
+                if db_entry is None:
                     new_version = 0
                 elif self._db.is_new_file(pathinfo, relpath_u):
-                    new_version = current_version + 1
+                    new_version = db_entry.version + 1
                 else:
                     self._log("Not uploading %r" % (relpath_u,))
                     self._count('objects_not_uploaded')
@@ -387,8 +398,8 @@ class Uploader(QueueMixin):
 
                 metadata = { 'version': new_version,
                              'last_downloaded_timestamp': last_downloaded_timestamp }
-                if last_downloaded_uri is not None:
-                    metadata['last_downloaded_uri'] = last_downloaded_uri
+                if db_entry is not None and db_entry.last_downloaded_uri is not None:
+                    metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
 
                 uploadable = FileName(unicode_from_filepath(fp), self._client.convergence)
                 d2 = self._upload_dirnode.add_file(encoded_path_u, uploadable,
@@ -398,7 +409,8 @@ class Uploader(QueueMixin):
                     filecap = filenode.get_uri()
                     last_downloaded_uri = metadata.get('last_downloaded_uri', None)
                     self._db.did_upload_version(relpath_u, new_version, filecap,
-                                                last_downloaded_uri, last_downloaded_timestamp, pathinfo)
+                                                last_downloaded_uri, last_downloaded_timestamp,
+                                                pathinfo)
                     self._count('files_uploaded')
                 d2.addCallback(_add_db_entry)
                 return d2
@@ -502,7 +514,8 @@ class WriteFileMixin(object):
 class Downloader(QueueMixin, WriteFileMixin):
     REMOTE_SCAN_INTERVAL = 3  # facilitates tests
 
-    def __init__(self, client, local_path_u, db, collective_dirnode, upload_readonly_dircap, clock):
+    def __init__(self, client, local_path_u, db, collective_dirnode,
+                 upload_readonly_dircap, clock, is_upload_pending):
         QueueMixin.__init__(self, client, local_path_u, db, 'downloader', clock)
 
         if not IDirectoryNode.providedBy(collective_dirnode):
@@ -514,6 +527,7 @@ class Downloader(QueueMixin, WriteFileMixin):
 
         self._collective_dirnode = collective_dirnode
         self._upload_readonly_dircap = upload_readonly_dircap
+        self._is_upload_pending = is_upload_pending
 
         self._turn_delay = self.REMOTE_SCAN_INTERVAL
 
@@ -544,9 +558,11 @@ class Downloader(QueueMixin, WriteFileMixin):
             self._log("nope")
             return False
         self._log("yep")
-        v = self._db.get_local_file_version(relpath_u)
-        self._log("v = %r" % (v,))
-        return (v is None or v < remote_version)
+        db_entry = self._db.get_db_entry(relpath_u)
+        if db_entry is None:
+            return True
+        self._log("version %r" % (db_entry.version,))
+        return (db_entry.version < remote_version)
 
     def _get_local_latest(self, relpath_u):
         """
@@ -556,7 +572,8 @@ class Downloader(QueueMixin, WriteFileMixin):
         """
         if not self._get_filepath(relpath_u).exists():
             return None
-        return self._db.get_local_file_version(relpath_u)
+        db_entry = self._db.get_db_entry(relpath_u)
+        return None if db_entry is None else db_entry.version
 
     def _get_collective_latest_file(self, filename):
         """
@@ -623,9 +640,9 @@ class Downloader(QueueMixin, WriteFileMixin):
             for dir_name in dirmap:
                 (dirnode, metadata) = dirmap[dir_name]
                 if dirnode.get_readonly_uri() != self._upload_readonly_dircap:
-                    d2.addCallback(lambda ign, dir_name=dir_name:
+                    d2.addCallback(lambda ign, dir_name=dir_name, dirnode=dirnode:
                                    self._scan_remote_dmd(dir_name, dirnode, scan_batch))
-                    def _err(f):
+                    def _err(f, dir_name=dir_name):
                         self._log("failed to scan DMD for client %r: %s" % (dir_name, f))
                         # XXX what should we do to make this failure more visible to users?
                     d2.addErrback(_err)
@@ -691,18 +708,26 @@ class Downloader(QueueMixin, WriteFileMixin):
             d.addCallback(fail)
         else:
             is_conflict = False
-            if self._db.check_file_db_exists(relpath_u):
+            db_entry = self._db.get_db_entry(relpath_u)
+            if db_entry:
                 dmd_last_downloaded_uri = metadata.get('last_downloaded_uri', None)
-                local_last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
                 print "metadata %r" % (metadata,)
-                print "<<<<--- if %r != %r" % (dmd_last_downloaded_uri, local_last_downloaded_uri)
-                if dmd_last_downloaded_uri is not None and local_last_downloaded_uri is not None:
-                    if dmd_last_downloaded_uri != local_last_downloaded_uri:
+                print "<<<<--- if %r != %r" % (dmd_last_downloaded_uri, db_entry.last_downloaded_uri)
+                if dmd_last_downloaded_uri is not None and db_entry.last_downloaded_uri is not None:
+                    if dmd_last_downloaded_uri != db_entry.last_downloaded_uri:
                         is_conflict = True
                         self._count('objects_conflicted')
-
-                #dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
-                #local_last_uploaded_uri = ...
+                else:
+                    dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
+                    print ">>>>  if %r != %r" % (dmd_last_uploaded_uri, db_entry.last_uploaded_uri)
+                    if dmd_last_uploaded_uri is not None and dmd_last_uploaded_uri != db_entry.last_uploaded_uri:
+                        is_conflict = True
+                        self._count('objects_conflicted')
+                    else:
+                        # mark as conflict if the file is still in the pending upload set
+                        if self._is_upload_pending(relpath_u):
+                            is_conflict = True
+                            self._count('objects_conflicted')
 
             if relpath_u.endswith(u"/"):
                 if metadata.get('deleted', False):
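
For review purposes, the conflict decision introduced in the last hunk can be read as a standalone check: a remote entry conflicts when its last_downloaded_uri disagrees with the local one; when that comparison is not possible, when its last_uploaded_uri is present and disagrees; and otherwise when the path is still sitting in the uploader's pending set. A distilled restatement (not code from this branch; names are illustrative):

    # Assumes db_entry is not None -- the branch only reaches this logic
    # when a local db row exists for the path.
    def is_remote_conflict(remote_metadata, db_entry, upload_pending):
        dmd_downloaded = remote_metadata.get('last_downloaded_uri', None)
        if dmd_downloaded is not None and db_entry.last_downloaded_uri is not None:
            return dmd_downloaded != db_entry.last_downloaded_uri
        dmd_uploaded = remote_metadata.get('last_uploaded_uri', None)
        if dmd_uploaded is not None and dmd_uploaded != db_entry.last_uploaded_uri:
            return True
        return upload_pending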