name = 'magic-folder'
def __init__(self, client, upload_dircap, collective_dircap, local_path_u, dbfile,
- pending_delay=1.0, clock=reactor):
+ pending_delay=1.0, clock=None):
precondition_abspath(local_path_u)
service.MultiService.__init__(self)
+ immediate = clock is not None
+ clock = clock or reactor
db = magicfolderdb.get_magicfolderdb(dbfile, create_version=(magicfolderdb.SCHEMA_v1, 1))
if db is None:
return Failure(Exception('ERROR: Unable to load magic folder db.'))
upload_dirnode = self._client.create_node_from_uri(upload_dircap)
collective_dirnode = self._client.create_node_from_uri(collective_dircap)
- self.uploader = Uploader(client, local_path_u, db, upload_dirnode, pending_delay, clock)
- self.downloader = Downloader(client, local_path_u, db, collective_dirnode, upload_dirnode.get_readonly_uri(), clock)
+ self.uploader = Uploader(client, local_path_u, db, upload_dirnode, pending_delay, clock, immediate)
+ self.downloader = Downloader(client, local_path_u, db, collective_dirnode,
+ upload_dirnode.get_readonly_uri(), clock, self.uploader.is_pending)
def startService(self):
# TODO: why is this being called more than once?
class Uploader(QueueMixin):
- def __init__(self, client, local_path_u, db, upload_dirnode, pending_delay, clock):
+ def __init__(self, client, local_path_u, db, upload_dirnode, pending_delay, clock,
+ immediate=False):
QueueMixin.__init__(self, client, local_path_u, db, 'uploader', clock)
self.is_ready = False
+ self._immediate = immediate
if not IDirectoryNode.providedBy(upload_dirnode):
raise AssertionError("The URI in '%s' does not refer to a directory."
return d
+ def is_pending(self, relpath_u):
+ return relpath_u in self._pending
+
def _notify(self, opaque, path, events_mask):
self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(self._inotify.humanReadableMask(events_mask))))
relpath_u = self._get_relpath(path)
self._pending.add(relpath_u)
self._count('objects_queued')
if self.is_ready:
- self._clock.callLater(0, self._turn_deque)
+ if self._immediate: # for tests
+ self._turn_deque()
+ else:
+ self._clock.callLater(0, self._turn_deque)
def _when_queue_is_empty(self):
return defer.succeed(None)
class Downloader(QueueMixin, WriteFileMixin):
REMOTE_SCAN_INTERVAL = 3 # facilitates tests
- def __init__(self, client, local_path_u, db, collective_dirnode, upload_readonly_dircap, clock):
+ def __init__(self, client, local_path_u, db, collective_dirnode,
+ upload_readonly_dircap, clock, is_upload_pending):
QueueMixin.__init__(self, client, local_path_u, db, 'downloader', clock)
if not IDirectoryNode.providedBy(collective_dirnode):
self._collective_dirnode = collective_dirnode
self._upload_readonly_dircap = upload_readonly_dircap
+ self._is_upload_pending = is_upload_pending
self._turn_delay = self.REMOTE_SCAN_INTERVAL
for dir_name in dirmap:
(dirnode, metadata) = dirmap[dir_name]
if dirnode.get_readonly_uri() != self._upload_readonly_dircap:
- d2.addCallback(lambda ign, dir_name=dir_name:
+ d2.addCallback(lambda ign, dir_name=dir_name, dirnode=dirnode:
self._scan_remote_dmd(dir_name, dirnode, scan_batch))
- def _err(f):
+ def _err(f, dir_name=dir_name):
self._log("failed to scan DMD for client %r: %s" % (dir_name, f))
# XXX what should we do to make this failure more visible to users?
d2.addErrback(_err)
if dmd_last_downloaded_uri != local_last_downloaded_uri:
is_conflict = True
self._count('objects_conflicted')
-
- #dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
- #local_last_uploaded_uri = ...
+ else:
+ dmd_last_uploaded_uri = metadata.get('last_uploaded_uri', None)
+ local_last_uploaded_uri = self._db.get_last_uploaded_uri(relpath_u)
+ print ">>>> if %r != %r" % (dmd_last_uploaded_uri, local_last_uploaded_uri)
+ if dmd_last_uploaded_uri is not None and dmd_last_uploaded_uri != local_last_uploaded_uri:
+ is_conflict = True
+ self._count('objects_conflicted')
+ else:
+                        # Mark as conflict if the file is still in the uploader's pending set.
+ if self._is_upload_pending(relpath_u):
+ is_conflict = True
+ self._count('objects_conflicted')
if relpath_u.endswith(u"/"):
if metadata.get('deleted', False):