consolidator: add more verbose traversal of directories
author: Brian Warner <warner@allmydata.com>
Thu, 12 Mar 2009 23:29:00 +0000 (16:29 -0700)
committer: Brian Warner <warner@allmydata.com>
Thu, 12 Mar 2009 23:29:00 +0000 (16:29 -0700)
src/allmydata/scripts/consolidate.py
src/allmydata/scripts/debug.py
src/allmydata/test/test_consolidate.py

index eea872c79f11e86b12d372bc91232cc31249c092..ef5c22e2688f0436d103c72c10fb7f41fb6388ea 100644 (file)
@@ -222,7 +222,7 @@ class Consolidator:
                 # readonly directory (which shares common subdirs with previous
                 # backups)
                 self.msg(" %s: processing" % rwname)
-                readcap = self.process_directory(readonly(writecap))
+                readcap = self.process_directory(readonly(writecap), (rwname,))
             if self.options["really"]:
                 self.msg("  replaced %s" % rwname)
                 self.put_child(archives_dircap, rwname, readcap)
@@ -241,7 +241,7 @@ class Consolidator:
                  % (self.directories_created, self.directories_used_as_is,
                     self.directories_reused))
 
-    def process_directory(self, readcap):
+    def process_directory(self, readcap, path):
         # I walk all my children (recursing over any subdirectories), build
         # up a table of my contents, then see if I can re-use an old
         # directory with the same contents. If not, I create a new directory
@@ -257,7 +257,9 @@ class Consolidator:
         for (childname, (childtype, childdata)) in sorted(data["children"].items()):
             if childtype == "dirnode":
                 num_dirs += 1
-                childcap = self.process_directory(str(childdata["ro_uri"]))
+                childpath = path + (childname,)
+                childcap = self.process_directory(str(childdata["ro_uri"]),
+                                                  childpath)
                 contents[childname] = ("dirnode", childcap, None)
             else:
                 childcap = str(childdata["ro_uri"])
@@ -267,17 +269,23 @@ class Consolidator:
         dirhash = self.hash_directory_contents(hashkids)
         old_dircap = self.get_old_dirhash(dirhash)
         if old_dircap:
+            if self.options["verbose"]:
+                self.msg("   %s: reused" % "/".join(path))
             assert isinstance(old_dircap, str)
             self.directories_reused += 1
             return old_dircap
         if num_dirs == 0:
-            # we're allowed to re-use this directory
+            # we're allowed to use this directory as-is
+            if self.options["verbose"]:
+                self.msg("   %s: used as-is" % "/".join(path))
             new_dircap = readonly(readcap)
             assert isinstance(new_dircap, str)
             self.store_dirhash(dirhash, new_dircap)
             self.directories_used_as_is += 1
             return new_dircap
         # otherwise, we need to create a new directory
+        if self.options["verbose"]:
+            self.msg("   %s: created" % "/".join(path))
         new_dircap = readonly(self.mkdir(contents))
         assert isinstance(new_dircap, str)
         self.store_dirhash(dirhash, new_dircap)
index 75e5b595f801a3ba40c64ccb5621ab4658087e4d..618a556f73573d8274780121f0f1974d1361796c 100644 (file)
@@ -768,6 +768,7 @@ class ConsolidateOptions(VDriveOptions):
         ]
     optFlags = [
         ("really", None, "Really remove old snapshot directories"),
+        ("verbose", "v", "Emit a line for every directory examined"),
         ]
     def parseArgs(self, where):
         self.where = where
index 6d7b6919be74211e0b9caff48321690f2c53b117..60f1441e7c156c964a32a2a5ca94877d831385f2 100644 (file)
@@ -170,6 +170,7 @@ class Consolidate(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli_good("debug", "consolidate",
                                        "--dbfile", dbfile,
                                        "--backupfile", backupfile,
+                                       "--verbose",
                                        "tahoe:"))
         def _check_consolidate_output1(out):
             lines = out.splitlines()