git.rkrishnan.org Git - tahoe-lafs/tahoe-lafs.git/commitdiff
WIP: Refactoring to get db fields in one query.
author    Daira Hopwood <daira@jacaranda.org>
          Thu, 29 Oct 2015 20:43:44 +0000 (20:43 +0000)
committer Daira Hopwood <daira@jacaranda.org>
          Mon, 28 Dec 2015 16:18:54 +0000 (16:18 +0000)
Signed-off-by: Daira Hopwood <daira@jacaranda.org>
src/allmydata/frontends/magic_folder.py
src/allmydata/magicfolderdb.py
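
The refactoring replaces the per-field accessors (check_file_db_exists, get_last_downloaded_uri, get_last_uploaded_uri, get_local_file_version) with a single MagicFolderDB.get_db_entry() call that returns a PathEntry namedtuple, or None when the path is not tracked. A condensed sketch of the caller-side pattern the Uploader hunks below move to; the helper name choose_new_version is hypothetical, and is_new_file stands in for self._db.is_new_file(pathinfo, relpath_u):

    from collections import namedtuple

    # Mirrors the PathEntry namedtuple added in magicfolderdb.py below.
    PathEntry = namedtuple('PathEntry', 'size mtime ctime version last_uploaded_uri'
                                        ' last_downloaded_uri last_downloaded_timestamp')

    def choose_new_version(db_entry, is_new_file):
        # db_entry is the result of MagicFolderDB.get_db_entry(relpath_u):
        # a PathEntry, or None when the path has never been tracked.
        if db_entry is None:
            return 0                        # first upload of a newly seen file
        if is_new_file:
            return db_entry.version + 1     # bump the locally recorded version
        return None                         # unchanged: nothing to upload

In the deleted-object path the code instead returns early when db_entry is None, since an untracked file that disappeared needs no remote deletion.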

diff --git a/src/allmydata/frontends/magic_folder.py b/src/allmydata/frontends/magic_folder.py
index e52d1c97bd21a790f1c8e90a2fdd5744cb697e9c..93463e7aa958994b5abc5f07476a68fe02b89870 100644
--- a/src/allmydata/frontends/magic_folder.py
+++ b/src/allmydata/frontends/magic_folder.py
@@ -329,17 +329,15 @@ class Uploader(QueueMixin):
                 # FIXME merge this with the 'isfile' case.
                 self._log("notified object %s disappeared (this is normal)" % quote_filepath(fp))
                 self._count('objects_disappeared')
-                if not self._db.check_file_db_exists(relpath_u):
+
+                db_entry = self._db.get_db_entry(relpath_u)
+                if db_entry is None:
                     return None
 
-                last_downloaded_timestamp = now
-                last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
+                last_downloaded_timestamp = now  # is this correct?
 
-                current_version = self._db.get_local_file_version(relpath_u)
-                if current_version is None:
-                    new_version = 0
-                elif self._db.is_new_file(pathinfo, relpath_u):
-                    new_version = current_version + 1
+                if self._db.is_new_file(pathinfo, relpath_u):
+                    new_version = db_entry.version + 1
                 else:
                     self._log("Not uploading %r" % (relpath_u,))
                     self._count('objects_not_uploaded')
@@ -348,8 +346,8 @@ class Uploader(QueueMixin):
                 metadata = { 'version': new_version,
                              'deleted': True,
                              'last_downloaded_timestamp': last_downloaded_timestamp }
-                if last_downloaded_uri is not None:
-                    metadata['last_downloaded_uri'] = last_downloaded_uri
+                if db_entry.last_downloaded_uri is not None:
+                    metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
 
                 empty_uploadable = Data("", self._client.convergence)
                 d2 = self._upload_dirnode.add_file(encoded_path_u, empty_uploadable,
@@ -357,8 +355,10 @@ class Uploader(QueueMixin):
 
                 def _add_db_entry(filenode):
                     filecap = filenode.get_uri()
+                    last_downloaded_uri = metadata.get('last_downloaded_uri', None)
                     self._db.did_upload_version(relpath_u, new_version, filecap,
-                                                last_downloaded_uri, last_downloaded_timestamp, pathinfo)
+                                                last_downloaded_uri, last_downloaded_timestamp,
+                                                pathinfo)
                     self._count('files_uploaded')
                 d2.addCallback(_add_db_entry)
                 return d2
@@ -383,14 +383,14 @@ class Uploader(QueueMixin):
                 upload_d.addCallback(lambda ign: self._scan(relpath_u))
                 return upload_d
             elif pathinfo.isfile:
-                last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
+                db_entry = self._db.get_db_entry(relpath_u)
+
                 last_downloaded_timestamp = now
 
-                current_version = self._db.get_local_file_version(relpath_u)
-                if current_version is None:
+                if db_entry is None:
                     new_version = 0
                 elif self._db.is_new_file(pathinfo, relpath_u):
-                    new_version = current_version + 1
+                    new_version = db_entry.version + 1
                 else:
                     self._log("Not uploading %r" % (relpath_u,))
                     self._count('objects_not_uploaded')
@@ -398,8 +398,8 @@ class Uploader(QueueMixin):
 
                 metadata = { 'version': new_version,
                              'last_downloaded_timestamp': last_downloaded_timestamp }
-                if last_downloaded_uri is not None:
-                    metadata['last_downloaded_uri'] = last_downloaded_uri
+                if db_entry is not None and db_entry.last_downloaded_uri is not None:
+                    metadata['last_downloaded_uri'] = db_entry.last_downloaded_uri
 
                 uploadable = FileName(unicode_from_filepath(fp), self._client.convergence)
                 d2 = self._upload_dirnode.add_file(encoded_path_u, uploadable,
@@ -409,7 +409,8 @@ class Uploader(QueueMixin):
                     filecap = filenode.get_uri()
                     last_downloaded_uri = metadata.get('last_downloaded_uri', None)
                     self._db.did_upload_version(relpath_u, new_version, filecap,
-                                                last_downloaded_uri, last_downloaded_timestamp, pathinfo)
+                                                last_downloaded_uri, last_downloaded_timestamp,
+                                                pathinfo)
                     self._count('files_uploaded')
                 d2.addCallback(_add_db_entry)
                 return d2
@@ -557,9 +558,11 @@ class Downloader(QueueMixin, WriteFileMixin):
             self._log("nope")
             return False
         self._log("yep")
-        v = self._db.get_local_file_version(relpath_u)
-        self._log("v = %r" % (v,))
-        return (v is None or v < remote_version)
+        db_entry = self._db.get_db_entry(relpath_u)
+        if db_entry is None:
+            return True
+        self._log("version %r" % (db_entry.version,))
+        return (db_entry.version < remote_version)
 
     def _get_local_latest(self, relpath_u):
         """
@@ -569,7 +572,8 @@ class Downloader(QueueMixin, WriteFileMixin):
         """
         if not self._get_filepath(relpath_u).exists():
             return None
-        return self._db.get_local_file_version(relpath_u)
+        db_entry = self._db.get_db_entry(relpath_u)
+        return None if db_entry is None else db_entry.version
 
     def _get_collective_latest_file(self, filename):
         """
@@ -704,20 +708,19 @@ class Downloader(QueueMixin, WriteFileMixin):
             d.addCallback(fail)
         else:
             is_conflict = False
-            if self._db.check_file_db_exists(relpath_u):
+            db_entry = self._db.get_db_entry(relpath_u)
+            if db_entry:
                 dmd_last_downloaded_uri = metadata.get('last_downloaded_uri', None)
-                local_last_downloaded_uri = self._db.get_last_downloaded_uri(relpath_u)
                 print "metadata %r" % (metadata,)
-                print "<<<<--- if %r != %r" % (dmd_last_downloaded_uri, local_last_downloaded_uri)
-                if dmd_last_downloaded_uri is not None and local_last_downloaded_uri is not None:
-                    if dmd_last_downloaded_uri != local_last_downloaded_uri:
+                print "<<<<--- if %r != %r" % (dmd_last_downloaded_uri, db_entry.last_downloaded_uri)
+                if dmd_last_downloaded_uri is not None and db_entry.last_downloaded_uri is not None:
+                    if dmd_last_downloaded_uri != db_entry.last_downloaded_uri:
                         is_conflict = True
                         self._count('objects_conflicted')
                 else:
                     dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
-                    local_last_uploaded_uri = self._db.get_last_uploaded_uri(relpath_u)
-                    print ">>>>  if %r != %r" % (dmd_last_uploaded_uri, local_last_uploaded_uri)
-                    if dmd_last_uploaded_uri is not None and dmd_last_uploaded_uri != local_last_uploaded_uri:
+                    print ">>>>  if %r != %r" % (dmd_last_uploaded_uri, db_entry.last_uploaded_uri)
+                    if dmd_last_uploaded_uri is not None and dmd_last_uploaded_uri != db_entry.last_uploaded_uri:
                         is_conflict = True
                         self._count('objects_conflicted')
                     else:
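
The final Downloader hunk derives the conflict flag by comparing the DMD's metadata against the URIs recorded in the same PathEntry. A standalone sketch of the branches visible in the hunk (the helper name is_conflict is hypothetical; the trailing else: branch is truncated by the hunk, so this sketch simply reports no conflict for that case):

    def is_conflict(metadata, db_entry):
        # No local record: nothing to conflict with.
        if db_entry is None:
            return False
        dmd_last_downloaded_uri = metadata.get('last_downloaded_uri', None)
        if dmd_last_downloaded_uri is not None and db_entry.last_downloaded_uri is not None:
            # Both sides recorded a download: differing caps mean a conflict.
            return dmd_last_downloaded_uri != db_entry.last_downloaded_uri
        dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
        # Fall back to comparing the last uploaded caps (further checks in the
        # original continue past the end of the hunk).
        return (dmd_last_uploaded_uri is not None
                and dmd_last_uploaded_uri != db_entry.last_uploaded_uri)
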
diff --git a/src/allmydata/magicfolderdb.py b/src/allmydata/magicfolderdb.py
index e3649d4edea3e6db91df162400a454467acc0f95..3f168328945e462998205cdc3934b98b41c2dbfb 100644
--- a/src/allmydata/magicfolderdb.py
+++ b/src/allmydata/magicfolderdb.py
@@ -1,5 +1,6 @@
 
 import sys
+from collections import namedtuple
 
 from allmydata.util.dbutil import get_db, DBError
 
@@ -42,6 +43,7 @@ def get_magicfolderdb(dbfile, stderr=sys.stderr,
         print >>stderr, e
         return None
 
+PathEntry = namedtuple('PathEntry', 'size mtime ctime version last_uploaded_uri last_downloaded_uri last_downloaded_timestamp')
 
 class MagicFolderDB(object):
     VERSION = 1
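
PathEntry is a plain namedtuple, so a local_files row becomes an immutable record whose columns are read by name rather than by index. A small illustrative sketch (the field values are made up):

    entry = PathEntry(size=321, mtime=1446151424.0, ctime=1446151424.0, version=3,
                      last_uploaded_uri=None, last_downloaded_uri=None,
                      last_downloaded_timestamp=1446151424.0)
    print entry.version                 # 3; no more positional row[0] indexing
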
@@ -51,20 +53,25 @@ class MagicFolderDB(object):
         self.connection = connection
         self.cursor = connection.cursor()
 
-    def check_file_db_exists(self, path):
-        """I will tell you if a given file has an entry in my database or not
-        by returning True or False.
+    def get_db_entry(self, relpath_u):
+        """
+        Retrieve the entry in the database for a given path, or return None
+        if there is no such entry.
         """
         c = self.cursor
-        c.execute("SELECT size,mtime,ctime"
+        c.execute("SELECT size, mtime, ctime, version, last_uploaded_uri, last_downloaded_uri, last_downloaded_timestamp"
                   " FROM local_files"
                   " WHERE path=?",
-                  (path,))
+                  (relpath_u,))
         row = self.cursor.fetchone()
         if not row:
-            return False
+            return None
         else:
-            return True
+            (size, mtime, ctime, version, last_uploaded_uri, last_downloaded_uri, last_downloaded_timestamp) = row
+            return PathEntry(size=size, mtime=mtime, ctime=ctime, version=version,
+                             last_uploaded_uri=last_uploaded_uri,
+                             last_downloaded_uri=last_downloaded_uri,
+                             last_downloaded_timestamp=last_downloaded_timestamp)
 
     def get_all_relpaths(self):
         """
@@ -75,54 +82,6 @@ class MagicFolderDB(object):
         rows = self.cursor.fetchall()
         return set([r[0] for r in rows])
 
-    def get_last_downloaded_uri(self, relpath_u):
-        """
-        Return the last downloaded uri recorded in the magic folder db.
-        If none are found then return None.
-        """
-        c = self.cursor
-        c.execute("SELECT last_downloaded_uri"
-                  " FROM local_files"
-                  " WHERE path=?",
-                  (relpath_u,))
-        row = self.cursor.fetchone()
-        if not row:
-            return None
-        else:
-            return row[0]
-
-    def get_last_uploaded_uri(self, relpath_u):
-        """
-        Return the last downloaded uri recorded in the magic folder db.
-        If none are found then return None.
-        """
-        c = self.cursor
-        c.execute("SELECT last_uploaded_uri"
-                  " FROM local_files"
-                  " WHERE path=?",
-                  (relpath_u,))
-        row = self.cursor.fetchone()
-        if not row:
-            return None
-        else:
-            return row[0]
-
-    def get_local_file_version(self, relpath_u):
-        """
-        Return the version of a local file tracked by our magic folder db.
-        If no db entry is found then return None.
-        """
-        c = self.cursor
-        c.execute("SELECT version"
-                  " FROM local_files"
-                  " WHERE path=?",
-                  (relpath_u,))
-        row = self.cursor.fetchone()
-        if not row:
-            return None
-        else:
-            return row[0]
-
     def did_upload_version(self, relpath_u, version, last_uploaded_uri, last_downloaded_uri, last_downloaded_timestamp, pathinfo):
         print "%r.did_upload_version(%r, %r, %r, %r, %r, %r)" % (self, relpath_u, version, last_uploaded_uri, last_downloaded_uri, last_downloaded_timestamp, pathinfo)
         try:
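
For reference, the consolidated lookup is one SELECT over local_files whose column order matches the PathEntry fields, so a row maps onto the namedtuple positionally. A sketch using sqlite3 directly (the filename and relpath are illustrative; table and column names are as in the diff):

    import sqlite3
    from allmydata.magicfolderdb import PathEntry   # the namedtuple added above

    conn = sqlite3.connect('magicfolder.sqlite')    # hypothetical db file
    row = conn.execute(
        "SELECT size, mtime, ctime, version, last_uploaded_uri,"
        " last_downloaded_uri, last_downloaded_timestamp"
        " FROM local_files WHERE path=?",
        (u'subdir/file.txt',)).fetchone()           # hypothetical relpath
    entry = PathEntry(*row) if row else None        # column order matches the fields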