From 8972f25b0fd4575613fe6f98c1256d3befd4e4f7 Mon Sep 17 00:00:00 2001
From: Daira Hopwood <daira@jacaranda.org>
Date: Wed, 16 Sep 2015 14:32:13 +0100
Subject: [PATCH] Move backupdb.py to src/allmydata.

Signed-off-by: Daira Hopwood <daira@jacaranda.org>
---
 src/allmydata/{scripts => }/backupdb.py | 124 ++++++++++++++++++++++--
 src/allmydata/frontends/drop_upload.py  |   2 +-
 src/allmydata/scripts/tahoe_backup.py   |   2 +-
 src/allmydata/test/test_backupdb.py     |   2 +-
 4 files changed, 117 insertions(+), 13 deletions(-)
 rename src/allmydata/{scripts => }/backupdb.py (74%)

diff --git a/src/allmydata/scripts/backupdb.py b/src/allmydata/backupdb.py
similarity index 74%
rename from src/allmydata/scripts/backupdb.py
rename to src/allmydata/backupdb.py
index d0c22616..a51356ca 100644
--- a/src/allmydata/scripts/backupdb.py
+++ b/src/allmydata/backupdb.py
@@ -12,28 +12,28 @@ from allmydata.util.dbutil import get_db, DBError
 DAY = 24*60*60
 MONTH = 30*DAY
 
-SCHEMA_v1 = """
-CREATE TABLE version -- added in v1
+MAIN_SCHEMA = """
+CREATE TABLE version
 (
- version INTEGER  -- contains one row, set to 2
+ version INTEGER  -- contains one row, set to %s
 );
 
-CREATE TABLE local_files -- added in v1
+CREATE TABLE local_files
 (
  path  VARCHAR(1024) PRIMARY KEY, -- index, this is an absolute UTF-8-encoded local filename
  size  INTEGER,       -- os.stat(fn)[stat.ST_SIZE]
  mtime NUMBER,        -- os.stat(fn)[stat.ST_MTIME]
  ctime NUMBER,        -- os.stat(fn)[stat.ST_CTIME]
- fileid INTEGER
+ fileid INTEGER%s
 );
 
-CREATE TABLE caps -- added in v1
+CREATE TABLE caps
 (
  fileid INTEGER PRIMARY KEY AUTOINCREMENT,
  filecap VARCHAR(256) UNIQUE       -- URI:CHK:...
 );
 
-CREATE TABLE last_upload -- added in v1
+CREATE TABLE last_upload
 (
  fileid INTEGER PRIMARY KEY,
  last_uploaded TIMESTAMP,
@@ -42,6 +42,8 @@ CREATE TABLE last_upload -- added in v1
 
 """
 
+SCHEMA_v1 = MAIN_SCHEMA % (1, "")
+
 TABLE_DIRECTORY = """
 
 CREATE TABLE directories -- added in v2
@@ -54,7 +56,7 @@ CREATE TABLE directories -- added in v2
 
 """
 
-SCHEMA_v2 = SCHEMA_v1 + TABLE_DIRECTORY
+SCHEMA_v2 = MAIN_SCHEMA % (2, "") + TABLE_DIRECTORY
 
 UPDATE_v1_to_v2 = TABLE_DIRECTORY + """
 UPDATE version SET version=2;
@@ -64,6 +66,10 @@ UPDATERS = {
     2: UPDATE_v1_to_v2,
 }
 
+
+SCHEMA_v3 = MAIN_SCHEMA % (3, ",\nversion INTEGER\n") + TABLE_DIRECTORY
+
+
 def get_backupdb(dbfile, stderr=sys.stderr,
                  create_version=(SCHEMA_v2, 2), just_create=False):
     # Open or create the given backupdb file. The parent directory must
@@ -71,7 +77,13 @@ def get_backupdb(dbfile, stderr=sys.stderr,
     try:
         (sqlite3, db) = get_db(dbfile, stderr, create_version, updaters=UPDATERS,
                                just_create=just_create, dbname="backupdb")
-        return BackupDB_v2(sqlite3, db)
+        if create_version[1] in (1, 2):
+            return BackupDB(sqlite3, db)
+        elif create_version[1] == 3:
+            return MagicFolderDB(sqlite3, db)
+        else:
+            print >>stderr, "invalid db schema version specified"
+            return None
     except DBError, e:
         print >>stderr, e
         return None
@@ -127,7 +139,7 @@ class DirectoryResult:
         self.bdb.did_check_directory_healthy(self.dircap, results)
 
 
-class BackupDB_v2:
+class BackupDB:
     VERSION = 2
     NO_CHECK_BEFORE = 1*MONTH
     ALWAYS_CHECK_AFTER = 2*MONTH
@@ -137,6 +149,21 @@ class BackupDB_v2:
         self.connection = connection
         self.cursor = connection.cursor()
 
+    def check_file_db_exists(self, path):
+        """I will tell you if a given file has an entry in my database or not
+        by returning True or False.
+        """
+        c = self.cursor
+        c.execute("SELECT size,mtime,ctime,fileid"
+                  " FROM local_files"
+                  " WHERE path=?",
+                  (path,))
+        row = self.cursor.fetchone()
+        if not row:
+            return False
+        else:
+            return True
+
     def check_file(self, path, use_timestamps=True):
         """I will tell you if a given local file needs to be uploaded or not,
         by looking in a database and seeing if I have a record of this file
@@ -336,3 +363,80 @@ class BackupDB_v2:
                             " WHERE dircap=?",
                             (now, dircap))
         self.connection.commit()
+
+
+class MagicFolderDB(BackupDB):
+    VERSION = 3
+
+    def get_all_files(self):
+        """Retreive a list of all files that have had an entry in magic-folder db
+        (files that have been downloaded at least once).
+        """
+        self.cursor.execute("SELECT path FROM local_files")
+        rows = self.cursor.fetchall()
+        if not rows:
+            return None
+        else:
+            return rows
+
+    def get_local_file_version(self, path):
+        """I will tell you the version of a local file tracked by our magic folder db.
+        If no db entry found then I'll return None.
+        """
+        c = self.cursor
+        c.execute("SELECT version, fileid"
+                  " FROM local_files"
+                  " WHERE path=?",
+                  (path,))
+        row = self.cursor.fetchone()
+        if not row:
+            return None
+        else:
+            return row[0]
+
+    def did_upload_file(self, filecap, path, version, mtime, ctime, size):
+        #print "_did_upload_file(%r, %r, %r, %r, %r, %r)" % (filecap, path, version, mtime, ctime, size)
+        now = time.time()
+        fileid = self.get_or_allocate_fileid_for_cap(filecap)
+        try:
+            self.cursor.execute("INSERT INTO last_upload VALUES (?,?,?)",
+                                (fileid, now, now))
+        except (self.sqlite_module.IntegrityError, self.sqlite_module.OperationalError):
+            self.cursor.execute("UPDATE last_upload"
+                                " SET last_uploaded=?, last_checked=?"
+                                " WHERE fileid=?",
+                                (now, now, fileid))
+        try:
+            self.cursor.execute("INSERT INTO local_files VALUES (?,?,?,?,?,?)",
+                                (path, size, mtime, ctime, fileid, version))
+        except (self.sqlite_module.IntegrityError, self.sqlite_module.OperationalError):
+            self.cursor.execute("UPDATE local_files"
+                                " SET size=?, mtime=?, ctime=?, fileid=?, version=?"
+                                " WHERE path=?",
+                                (size, mtime, ctime, fileid, version, path))
+        self.connection.commit()
+
+    def is_new_file_time(self, path, relpath_u):
+        """recent_file_time returns true if the file is recent...
+        meaning its current statinfo (i.e. size, ctime, and mtime) matched the statinfo
+        that was previously stored in the db.
+        """
+        #print "check_file_time %s %s" % (path, relpath_u)
+        path = abspath_expanduser_unicode(path)
+        s = os.stat(path)
+        size = s[stat.ST_SIZE]
+        ctime = s[stat.ST_CTIME]
+        mtime = s[stat.ST_MTIME]
+        c = self.cursor
+        c.execute("SELECT size,mtime,ctime,fileid"
+                  " FROM local_files"
+                  " WHERE path=?",
+                  (relpath_u,))
+        row = self.cursor.fetchone()
+        if not row:
+            return True
+        (last_size,last_mtime,last_ctime,last_fileid) = row
+        if (size, ctime, mtime) == (last_size, last_ctime, last_mtime):
+            return False
+        else:
+            return True
diff --git a/src/allmydata/frontends/drop_upload.py b/src/allmydata/frontends/drop_upload.py
index ea561d9f..16e96919 100644
--- a/src/allmydata/frontends/drop_upload.py
+++ b/src/allmydata/frontends/drop_upload.py
@@ -12,7 +12,7 @@ from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abs
 from allmydata.util.encodingutil import listdir_unicode, to_filepath, \
      unicode_from_filepath, quote_local_unicode_path, FilenameEncodingError
 from allmydata.immutable.upload import FileName
-from allmydata.scripts import backupdb
+from allmydata import backupdb
 
 
 class DropUploader(service.MultiService):
diff --git a/src/allmydata/scripts/tahoe_backup.py b/src/allmydata/scripts/tahoe_backup.py
index f12b3171..e2276406 100644
--- a/src/allmydata/scripts/tahoe_backup.py
+++ b/src/allmydata/scripts/tahoe_backup.py
@@ -8,7 +8,7 @@ from allmydata.scripts.common import get_alias, escape_path, DEFAULT_ALIAS, \
                                      UnknownAliasError
 from allmydata.scripts.common_http import do_http, HTTPError, format_http_error
 from allmydata.util import time_format
-from allmydata.scripts import backupdb
+from allmydata import backupdb
 from allmydata.util.encodingutil import listdir_unicode, quote_output, \
      quote_local_unicode_path, to_str, FilenameEncodingError, unicode_to_url
 from allmydata.util.assertutil import precondition
diff --git a/src/allmydata/test/test_backupdb.py b/src/allmydata/test/test_backupdb.py
index 835e2531..dddfb821 100644
--- a/src/allmydata/test/test_backupdb.py
+++ b/src/allmydata/test/test_backupdb.py
@@ -6,7 +6,7 @@ from twisted.trial import unittest
 from allmydata.util import fileutil
 from allmydata.util.encodingutil import listdir_unicode, get_filesystem_encoding, unicode_platform
 from allmydata.util.assertutil import precondition
-from allmydata.scripts import backupdb
+from allmydata import backupdb
 
 class BackupDB(unittest.TestCase):
     def create(self, dbfile):
-- 
2.45.2
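
A minimal usage sketch of the dispatch this patch adds to get_backupdb(); the
db filenames and the queried path below are illustrative assumptions, not part
of the change:

    import sys
    from allmydata import backupdb

    # Default create_version=(SCHEMA_v2, 2): the classic BackupDB used by "tahoe backup".
    bdb = backupdb.get_backupdb("backup.db", stderr=sys.stderr)

    # create_version=(SCHEMA_v3, 3): the new MagicFolderDB, which adds per-file versions.
    mfdb = backupdb.get_backupdb("magicfolder.db", stderr=sys.stderr,
                                 create_version=(backupdb.SCHEMA_v3, 3))
    if mfdb is not None:
        # get_local_file_version() returns None for a path with no db entry.
        print mfdb.get_local_file_version(u"some-file")

Both calls return None (after writing a message to stderr) if the database
cannot be opened or an unknown schema version is requested.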