-#! /usr/bin/python
-
"""
this is a load-generating client program. It does all of its work through a
given tahoe node (specified by URL), and performs random reads and writes
import os, sys, httplib, binascii
import urllib, simplejson, random, time, urlparse
+if sys.argv[1] == "--stats":
+ statsfiles = sys.argv[2:]
+ # gather stats every 10 seconds, do a moving-window average of the last
+ # 60 seconds
+ DELAY = 10
+ MAXSAMPLES = 6
+ totals = []
+ last_stats = {}
+ while True:
+ stats = {}
+ for sf in statsfiles:
+ for line in open(sf, "r").readlines():
+ name, value = line.split(":")
+ value = int(value.strip())
+ if name not in stats:
+ stats[name] = 0
+ stats[name] += float(value)
+ del name
+ if last_stats:
+ delta = dict( [ (n,stats[n]-last_stats[n])
+ for n in stats ] )
+ print "THIS SAMPLE:"
+ for name in sorted(delta.keys()):
+ avg = float(delta[name]) / float(DELAY)
+ print "%20s: %0.2f per second" % (name, avg)
+ totals.append(delta)
+ while len(totals) > MAXSAMPLES:
+ totals.pop(0)
+
+ # now compute average
+ print
+ print "MOVING WINDOW AVERAGE:"
+ for name in sorted(delta.keys()):
+ avg = sum([ s[name] for s in totals]) / (DELAY*len(totals))
+ print "%20s %0.2f per second" % (name, avg)
+
+ last_stats = stats
+ print
+ print
+ time.sleep(DELAY)
+
stats_out = sys.argv[1]
server_urls = []
[int(x) for x in open("operation-mix", "r").read().strip().split("/")])
# Cumulative totals for the load generated so far; the main loop bumps these
# and periodically dumps them to the stats file for the --stats reporter.
files_uploaded = files_downloaded = 0
bytes_uploaded = bytes_downloaded = 0
directories_read = directories_written = 0
-def listdir(nodeurl, root, vdrive_pathname):
+def listdir(nodeurl, root, remote_pathname):
if nodeurl[-1] != "/":
nodeurl += "/"
url = nodeurl + "uri/%s/" % urllib.quote(root)
- if vdrive_pathname:
- url += urllib.quote(vdrive_pathname)
+ if remote_pathname:
+ url += urllib.quote(remote_pathname)
url += "?t=json"
data = urllib.urlopen(url).read()
try:
raise
nodetype, d = parsed
assert nodetype == "dirnode"
- return d['children']
+ global directories_read
+ directories_read += 1
+ children = dict( [(unicode(name),value)
+ for (name,value)
+ in d["children"].iteritems()] )
+ return children
def choose_random_descendant(server_url, root, pathname=""):
if pathname:
url += urllib.quote(pathname)
f = urllib.urlopen(url)
+ global bytes_downloaded
while True:
data = f.read(4096)
if not data:
break
+ bytes_downloaded += len(data)
+
directories = [
"dreamland/disengaging/hucksters",
path = "/"
return scheme, host, port, path
-def generate_and_put(nodeurl, root, vdrive_fname, size):
+def generate_and_put(nodeurl, root, remote_filename, size):
if nodeurl[-1] != "/":
nodeurl += "/"
url = nodeurl + "uri/%s/" % urllib.quote(root)
- url += urllib.quote(vdrive_fname)
+ url += urllib.quote(remote_filename)
scheme, host, port, path = parse_url(url)
if scheme == "http":
c.putheader("Connection", "close")
c.putheader("Content-Length", "%d" % size)
c.endheaders()
+ global bytes_uploaded
while size:
chunksize = min(size, 4096)
size -= chunksize
c.send("\x00" * chunksize)
+ bytes_uploaded += chunksize
return c.getresponse()
while True:
time.sleep(delay)
- if random.uniform(0, readfreq+writefreq) > readfreq:
+ if random.uniform(0, readfreq+writefreq) < readfreq:
op = "read"
else:
op = "write"
pathname = choose_random_descendant(server, root)
print " reading", pathname
read_and_discard(server, root, pathname)
+ files_downloaded += 1
elif op == "write":
if random.uniform(0, 100) < 10:
current_writedir = create_random_directory()
size = choose_size()
print " size", size
generate_and_put(server, root, pathname, size)
+ files_uploaded += 1
+
+ f = open(stats_out+".tmp", "w")
+ f.write("files-uploaded: %d\n" % files_uploaded)
+ f.write("files-downloaded: %d\n" % files_downloaded)
+ f.write("bytes-uploaded: %d\n" % bytes_uploaded)
+ f.write("bytes-downloaded: %d\n" % bytes_downloaded)
+ f.write("directories-read: %d\n" % directories_read)
+ f.write("directories-written: %d\n" % directories_written)
+ f.close()
+ os.rename(stats_out+".tmp", stats_out)