-# the backupdb is only available if sqlite3 is available. Python-2.5.x and
-# beyond include sqlite3 in the standard library. For python-2.4, the
-# "pysqlite2" "package" (or "module") (which, despite the confusing name, uses
-# sqlite3, and which, confusingly, comes in the "pysqlite" "distribution" (or
-# "package")) must be installed. On debian, install python-pysqlite2
-
import os.path, sys, time, random, stat
+
from allmydata.util.netstring import netstring
from allmydata.util.hashutil import backupdb_dirhash
from allmydata.util import base32
+from allmydata.util.fileutil import abspath_expanduser_unicode
+from allmydata.util.encodingutil import to_str
+from allmydata.util.dbutil import get_db, DBError
+
DAY = 24*60*60
MONTH = 30*DAY
CREATE TABLE local_files -- added in v1
(
- path VARCHAR(1024) PRIMARY KEY, -- index, this is os.path.abspath(fn)
+ path VARCHAR(1024) PRIMARY KEY, -- index, this is an absolute UTF-8-encoded local filename
size INTEGER, -- os.stat(fn)[stat.ST_SIZE]
mtime NUMBER, -- os.stat(fn)[stat.ST_MTIME]
ctime NUMBER, -- os.stat(fn)[stat.ST_CTIME]
UPDATE version SET version=2;
"""
+UPDATERS = {
+ 2: UPDATE_v1_to_v2,
+}
def get_backupdb(dbfile, stderr=sys.stderr,
create_version=(SCHEMA_v2, 2), just_create=False):
- # open or create the given backupdb file. The parent directory must
+ # Open or create the given backupdb file. The parent directory must
# exist.
try:
- import sqlite3
- sqlite = sqlite3 # pyflakes whines about 'import sqlite3 as sqlite' ..
- except ImportError:
- from pysqlite2 import dbapi2
- sqlite = dbapi2 # .. when this clause does it too
- # This import should never fail, because setuptools requires that the
- # "pysqlite" distribution is present at start time (if on Python < 2.5).
-
- must_create = not os.path.exists(dbfile)
- try:
- db = sqlite.connect(dbfile)
- except (EnvironmentError, sqlite.OperationalError), e:
- print >>stderr, "Unable to create/open backupdb file %s: %s" % (dbfile, e)
- return None
-
- c = db.cursor()
- if must_create:
- schema, version = create_version
- c.executescript(schema)
- c.execute("INSERT INTO version (version) VALUES (?)", (version,))
- db.commit()
-
- try:
- c.execute("SELECT version FROM version")
- version = c.fetchone()[0]
- except sqlite.DatabaseError, e:
- # this indicates that the file is not a compatible database format.
- # Perhaps it was created with an old version, or it might be junk.
- print >>stderr, "backupdb file is unusable: %s" % e
+ (sqlite3, db) = get_db(dbfile, stderr, create_version, updaters=UPDATERS,
+ just_create=just_create, dbname="backupdb")
+ return BackupDB_v2(sqlite3, db)
+ except DBError, e:
+ print >>stderr, e
return None
- if just_create: # for tests
- return True
-
- if version == 1:
- c.executescript(UPDATE_v1_to_v2)
- db.commit()
- version = 2
- if version == 2:
- return BackupDB_v2(sqlite, db)
- print >>stderr, "Unable to handle backupdb version %s" % version
- return None
class FileResult:
def __init__(self, bdb, filecap, should_check,
def did_check_healthy(self, results):
self.bdb.did_check_file_healthy(self.filecap, results)
+
class DirectoryResult:
def __init__(self, bdb, dirhash, dircap, should_check):
self.bdb = bdb
def did_check_healthy(self, results):
self.bdb.did_check_directory_healthy(self.dircap, results)
+
class BackupDB_v2:
VERSION = 2
NO_CHECK_BEFORE = 1*MONTH
is not healthy, please upload the file and call r.did_upload(filecap)
when you're done.
- I use_timestamps=True (the default), I will compare ctime and mtime
+ If use_timestamps=True (the default), I will compare ctime and mtime
of the local file against an entry in my database, and consider the
file to be unchanged if ctime, mtime, and filesize are all the same
as the earlier version. If use_timestamps=False, I will not trust the
current working directory. The database stores absolute pathnames.
"""
- path = os.path.abspath(path)
+ path = abspath_expanduser_unicode(path)
+
+ # TODO: consider using get_pathinfo.
s = os.stat(path)
size = s[stat.ST_SIZE]
ctime = s[stat.ST_CTIME]
probability = min(max(probability, 0.0), 1.0)
should_check = bool(random.random() < probability)
- return FileResult(self, str(filecap), should_check,
+ return FileResult(self, to_str(filecap), should_check,
path, mtime, ctime, size)
def get_or_allocate_fileid_for_cap(self, filecap):
c.execute("INSERT INTO caps (filecap) VALUES (?)", (filecap,))
except (self.sqlite_module.IntegrityError, self.sqlite_module.OperationalError):
# sqlite3 on sid gives IntegrityError
- # pysqlite2 on dapper gives OperationalError
+        # pysqlite2 (no longer used by this code, so this case may be obsolete) on dapper gives OperationalError
pass
c.execute("SELECT fileid FROM caps WHERE filecap=?", (filecap,))
foundrow = c.fetchone()
probability = min(max(probability, 0.0), 1.0)
should_check = bool(random.random() < probability)
- return DirectoryResult(self, dirhash_s, str(dircap), should_check)
+ return DirectoryResult(self, dirhash_s, to_str(dircap), should_check)
def did_create_directory(self, dircap, dirhash):
now = time.time()