Add a unit-test and correct the code for "already deleted"
[tahoe-lafs/tahoe-lafs.git] / src / allmydata / frontends / magic_folder.py
index 1be3e683adf3b9e8eb003093982b0c18521613e0..42b639c7b026f9f5b2df3516bd0da8950e93d59e 100644 (file)
@@ -65,7 +65,8 @@ class MagicFolder(service.MultiService):
         collective_dirnode = self._client.create_node_from_uri(collective_dircap)
 
         self.uploader = Uploader(client, local_path_u, db, upload_dirnode, pending_delay, clock, immediate)
-        self.downloader = Downloader(client, local_path_u, db, collective_dirnode, upload_dirnode.get_readonly_uri(), clock)
+        self.downloader = Downloader(client, local_path_u, db, collective_dirnode,
+                                     upload_dirnode.get_readonly_uri(), clock, self.uploader.is_pending)
 
     def startService(self):
         # TODO: why is this being called more than once?
@@ -265,6 +266,9 @@ class Uploader(QueueMixin):
 
         return d
 
+    def is_pending(self, relpath_u):
+        return relpath_u in self._pending
+
     def _notify(self, opaque, path, events_mask):
         self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(self._inotify.humanReadableMask(events_mask))))
         relpath_u = self._get_relpath(path)
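A note on the two hunks above: Uploader now exposes a small predicate, is_pending(), and MagicFolder passes the bound method into Downloader as is_upload_pending, so the download side can ask whether a path is still queued for upload. A minimal sketch of that wiring; only is_pending, _pending and the new constructor argument come from this diff, the rest is illustrative:

    class Uploader(object):
        def __init__(self):
            self._pending = set()   # relpaths queued but not yet uploaded

        def is_pending(self, relpath_u):
            return relpath_u in self._pending

    class Downloader(object):
        def __init__(self, is_upload_pending):
            self._is_upload_pending = is_upload_pending   # callable: relpath -> bool

    uploader = Uploader()
    downloader = Downloader(uploader.is_pending)
    assert not downloader._is_upload_pending(u'foo.txt')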
@@ -325,17 +329,15 @@ class Uploader(QueueMixin):
                 # FIXME merge this with the 'isfile' case.
                 self._log("notified object %s disappeared (this is normal)" % quote_filepath(fp))
                 self._count('objects_disappeared')
-                if not self._db.check_file_db_exists(relpath_u):
+
+                db_entry = self._db.get_db_entry(relpath_u)
+                if db_entry is None:
                     return None
 
-                last_downloaded_timestamp = now
-                last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
+                last_downloaded_timestamp = now  # is this correct?
 
-                current_version = self._db.get_local_file_version(relpath_u)
-                if current_version is None:
-                    new_version = 0
-                elif self._db.is_new_file(pathinfo, relpath_u):
-                    new_version = current_version + 1
+                if self._db.is_new_file(pathinfo, relpath_u):
+                    new_version = db_entry.version + 1
                 else:
                     self._log("Not uploading %r" % (relpath_u,))
                     self._count('objects_not_uploaded')
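The hunk above (and the matching isfile hunk further down) folds three separate db lookups (check_file_db_exists, get_last_downloaded_uri, get_local_file_version) into a single get_db_entry() call that returns None for an unknown path, or an entry exposing at least version, last_downloaded_uri and last_uploaded_uri. A rough sketch of that assumed shape, based only on the attributes this diff touches (the real db row may carry more fields):

    from collections import namedtuple

    # assumed entry shape; field list inferred from the attribute accesses in this diff
    PathEntry = namedtuple('PathEntry',
                           ['version', 'last_downloaded_uri', 'last_uploaded_uri'])

    def get_db_entry(rows, relpath_u):
        """Return the entry for relpath_u, or None if it was never recorded."""
        row = rows.get(relpath_u)
        return None if row is None else PathEntry(*row)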
@@ -344,8 +346,8 @@ class Uploader(QueueMixin):
                 metadata = { 'version': new_version,
                              'deleted': True,
                              'last_downloaded_timestamp': last_downloaded_timestamp }
-                if last_downloaded_uri is not None:
-                    metadata['last_downloaded_uri'] = last_downloaded_uri
+                if db_entry.last_downloaded_uri is not None:
+                    metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
 
                 empty_uploadable = Data("", self._client.convergence)
                 d2 = self._upload_dirnode.add_file(encoded_path_u, empty_uploadable,
@@ -353,8 +355,10 @@ class Uploader(QueueMixin):
 
                 def _add_db_entry(filenode):
                     filecap = filenode.get_uri()
+                    last_downloaded_uri = metadata.get('last_downloaded_uri', None)
                     self._db.did_upload_version(relpath_u, new_version, filecap,
-                                                last_downloaded_uri, last_downloaded_timestamp, pathinfo)
+                                                last_downloaded_uri, last_downloaded_timestamp,
+                                                pathinfo)
                     self._count('files_uploaded')
                 d2.addCallback(_add_db_entry)
                 return d2
@@ -379,14 +383,14 @@ class Uploader(QueueMixin):
                 upload_d.addCallback(lambda ign: self._scan(relpath_u))
                 return upload_d
             elif pathinfo.isfile:
-                last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
+                db_entry = self._db.get_db_entry(relpath_u)
+
                 last_downloaded_timestamp = now
 
-                current_version = self._db.get_local_file_version(relpath_u)
-                if current_version is None:
+                if db_entry is None:
                     new_version = 0
                 elif self._db.is_new_file(pathinfo, relpath_u):
-                    new_version = current_version + 1
+                    new_version = db_entry.version + 1
                 else:
                     self._log("Not uploading %r" % (relpath_u,))
                     self._count('objects_not_uploaded')
@@ -394,8 +398,8 @@ class Uploader(QueueMixin):
 
                 metadata = { 'version': new_version,
                              'last_downloaded_timestamp': last_downloaded_timestamp }
-                if last_downloaded_uri is not None:
-                    metadata['last_downloaded_uri'] = last_downloaded_uri
+                if db_entry is not None and db_entry.last_downloaded_uri is not None:
+                    metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
 
                 uploadable = FileName(unicode_from_filepath(fp), self._client.convergence)
                 d2 = self._upload_dirnode.add_file(encoded_path_u, uploadable,
@@ -405,7 +409,8 @@ class Uploader(QueueMixin):
                     filecap = filenode.get_uri()
                     last_downloaded_uri = metadata.get('last_downloaded_uri', None)
                     self._db.did_upload_version(relpath_u, new_version, filecap,
-                                                last_downloaded_uri, last_downloaded_timestamp, pathinfo)
+                                                last_downloaded_uri, last_downloaded_timestamp,
+                                                pathinfo)
                     self._count('files_uploaded')
                 d2.addCallback(_add_db_entry)
                 return d2
@@ -500,8 +505,7 @@ class WriteFileMixin(object):
         self._log('renaming deleted file to backup: %s' % (abspath_u,))
         try:
             fileutil.rename_no_overwrite(abspath_u, abspath_u + u'.backup')
-        except IOError:
-            # XXX is this the correct error?
+        except OSError:
             self._log("Already gone: '%s'" % (abspath_u,))
         return abspath_u
 
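About the exception change above: rename_no_overwrite presumably ends up calling os.rename, and a source path that has already been deleted surfaces as OSError rather than IOError, so catching OSError matches the "already deleted" case this commit is about. The pattern in isolation (rename_backup is a hypothetical stand-in for the fileutil helper plus the logging above):

    import os

    def rename_backup(abspath_u):
        try:
            os.rename(abspath_u, abspath_u + u'.backup')
        except OSError:
            # the file is already gone; treat the backup rename as a no-op
            pass
        return abspath_u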
@@ -509,7 +513,8 @@ class WriteFileMixin(object):
 class Downloader(QueueMixin, WriteFileMixin):
     REMOTE_SCAN_INTERVAL = 3  # facilitates tests
 
-    def __init__(self, client, local_path_u, db, collective_dirnode, upload_readonly_dircap, clock):
+    def __init__(self, client, local_path_u, db, collective_dirnode,
+                 upload_readonly_dircap, clock, is_upload_pending):
         QueueMixin.__init__(self, client, local_path_u, db, 'downloader', clock)
 
         if not IDirectoryNode.providedBy(collective_dirnode):
@@ -521,6 +526,7 @@ class Downloader(QueueMixin, WriteFileMixin):
 
         self._collective_dirnode = collective_dirnode
         self._upload_readonly_dircap = upload_readonly_dircap
+        self._is_upload_pending = is_upload_pending
 
         self._turn_delay = self.REMOTE_SCAN_INTERVAL
 
@@ -551,9 +557,11 @@ class Downloader(QueueMixin, WriteFileMixin):
             self._log("nope")
             return False
         self._log("yep")
-        v = self._db.get_local_file_version(relpath_u)
-        self._log("v = %r" % (v,))
-        return (v is None or v < remote_version)
+        db_entry = self._db.get_db_entry(relpath_u)
+        if db_entry is None:
+            return True
+        self._log("version %r" % (db_entry.version,))
+        return (db_entry.version < remote_version)
 
     def _get_local_latest(self, relpath_u):
         """
@@ -563,7 +571,8 @@ class Downloader(QueueMixin, WriteFileMixin):
         """
         if not self._get_filepath(relpath_u).exists():
             return None
-        return self._db.get_local_file_version(relpath_u)
+        db_entry = self._db.get_db_entry(relpath_u)
+        return None if db_entry is None else db_entry.version
 
     def _get_collective_latest_file(self, filename):
         """
@@ -698,18 +707,26 @@ class Downloader(QueueMixin, WriteFileMixin):
             d.addCallback(fail)
         else:
             is_conflict = False
-            if self._db.check_file_db_exists(relpath_u):
+            db_entry = self._db.get_db_entry(relpath_u)
+            if db_entry:
                 dmd_last_downloaded_uri = metadata.get('last_downloaded_uri', None)
-                local_last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
                 print "metadata %r" % (metadata,)
-                print "<<<<--- if %r != %r" % (dmd_last_downloaded_uri, local_last_downloaded_uri)
-                if dmd_last_downloaded_uri is not None and local_last_downloaded_uri is not None:
-                    if dmd_last_downloaded_uri != local_last_downloaded_uri:
+                print "<<<<--- if %r != %r" % (dmd_last_downloaded_uri, db_entry.last_downloaded_uri)
+                if dmd_last_downloaded_uri is not None and db_entry.last_downloaded_uri is not None:
+                    if dmd_last_downloaded_uri != db_entry.last_downloaded_uri:
                         is_conflict = True
                         self._count('objects_conflicted')
-
-                #dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
-                #local_last_uploaded_uri = ...
+                else:
+                    dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
+                    print ">>>>  if %r != %r" % (dmd_last_uploaded_uri, db_entry.last_uploaded_uri)
+                    if dmd_last_uploaded_uri is not None and dmd_last_uploaded_uri != db_entry.last_uploaded_uri:
+                        is_conflict = True
+                        self._count('objects_conflicted')
+                    else:
+                        # XXX todo: mark as conflict if file is in pending upload set
+                        if self._is_upload_pending(relpath_u):
+                            is_conflict = True
+                            self._count('objects_conflicted')
 
             if relpath_u.endswith(u"/"):
                 if metadata.get('deleted', False):
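The conflict detection added in the last hunk reads as a three-step decision: when both the DMD metadata and the local db entry carry a last_downloaded_uri, comparing those two settles it; otherwise a differing last_uploaded_uri marks a conflict; failing that, a path still sitting in the uploader's pending set is treated as conflicted. The same decision as a sketch, minus the objects_conflicted counting (db_entry assumed non-None, as inside the "if db_entry:" branch above):

    def detect_conflict(metadata, db_entry, is_upload_pending, relpath_u):
        dmd_last_downloaded_uri = metadata.get('last_downloaded_uri', None)
        if (dmd_last_downloaded_uri is not None
                and db_entry.last_downloaded_uri is not None):
            # both sides remember a download: any difference means conflict
            return dmd_last_downloaded_uri != db_entry.last_downloaded_uri
        dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
        if (dmd_last_uploaded_uri is not None
                and dmd_last_uploaded_uri != db_entry.last_uploaded_uri):
            return True
        # a local change still queued for upload also counts as a conflict
        return is_upload_pending(relpath_u)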