sdmf_versions[si][0].add( ver )
sdmf_versions[si][1].add( line )
+ del si
chk_multiple_encodings = [(si,lines)
for si,(encodings,lines) in chk_encodings.items()
if len(encodings) > 1]
self._history = history
self._download_status = download_status
- k, N = self._verifycap.needed_shares, self._verifycap.total_shares
- self.share_hash_tree = IncompleteHashTree(N)
+ self.share_hash_tree = IncompleteHashTree(self._verifycap.total_shares)
# we guess the segment size, so Segmentation can pull non-initial
# segments in a single roundtrip. This populates
name, source = self.get_source_info(ss)
sources.append( (name, source) )
+ del name
have_source_dirs = bool([s for (name,s) in sources
if isinstance(s, (LocalDirectorySource,
TahoeDirectorySource))])
self.increment(rec, "examined-buckets", 1)
if sharetype:
self.increment(rec, "examined-buckets-"+sharetype, 1)
+ del wks
try:
bucket_diskbytes = s.st_blocks * 512
if name not in stats:
stats[name] = 0
stats[name] += float(value)
+ del name
if last_stats:
delta = dict( [ (name,stats[name]-last_stats[name])
for name in stats ] )
rows[free_slot] = ev["finish_time"]
ev["row"] = (groupnum, free_slot)
new_events.append(ev)
+ del groupnum
# maybe also return serverid_to_group, groupnum_to_rows, and some
# indication of the highest finish_time
#
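For context, a minimal, self-contained sketch of the greedy row-packing idea the fragment above performs: each event is placed in the first row of its group whose previous occupant has already finished, otherwise a new row is opened. The event dicts and the standalone function name are hypothetical; the real code keeps additional bookkeeping (serverid_to_group, groupnum_to_rows) as the comment suggests.

    def assign_rows(events, groupnum):
        # rows[i] holds the finish_time of the last event placed in row i
        rows = []
        new_events = []
        for ev in sorted(events, key=lambda ev: ev["start_time"]):
            free_slot = None
            for i, finish in enumerate(rows):
                if finish <= ev["start_time"]:
                    free_slot = i   # this row is free again, reuse it
                    break
            if free_slot is None:
                free_slot = len(rows)   # no free row: open a new one
                rows.append(None)
            rows[free_slot] = ev["finish_time"]
            ev["row"] = (groupnum, free_slot)
            new_events.append(ev)
        return new_events, rows

Returning rows alongside the annotated events would let the caller derive the per-group row count and the highest finish_time mentioned in the comment above.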