2 this is a load-generating client program. It does all of its work through a
3 given tahoe node (specified by URL), and performs random reads and writes
6 Run this in a directory with the following files:
7 server-URLs : a list of tahoe node URLs (one per line). Each operation
8 will use a randomly-selected server.
9 root.cap: (string) the top-level directory rwcap to use
10 delay: (float) seconds to delay between operations
11 operation-mix: "R/W": two ints, relative frequency of read and write ops
Set argv[1] to a per-client stats-NN.out file. This will be updated with
15 running totals of bytes-per-second and operations-per-second. The stats from
16 multiple clients can be totalled together and averaged over time to compute
17 the traffic being accepted by the grid.
19 Each time a 'read' operation is performed, the client will begin at the root
20 and randomly choose a child. If the child is a directory, the client will
21 recurse. If the child is a file, the client will read the contents of the
24 Each time a 'write' operation is performed, the client will generate a target
25 filename (a random string). 90% of the time, the file will be written into
26 the same directory that was used last time (starting at the root). 10% of the
27 time, a new directory is created by assembling 1 to 5 pathnames chosen at
28 random. The client then writes a certain number of zero bytes to this file.
29 The filesize is determined with something like a power-law distribution, with
30 a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
35 import os, sys, httplib, binascii
36 import urllib, simplejson, random, time, urlparse
38 if sys.argv[1] == "--stats":
39 statsfiles = sys.argv[2:]
40 # gather stats every 10 seconds, do a moving-window average of the last
49 for line in open(sf, "r").readlines():
50 name, value = line.split(":")
51 value = int(value.strip())
54 stats[name] += float(value)
56 delta = dict( [ (name,stats[name]-last_stats[name])
59 for name in sorted(delta.keys()):
60 avg = float(delta[name]) / float(DELAY)
61 print "%20s: %0.2f per second" % (name, avg)
63 while len(totals) > MAXSAMPLES:
68 print "MOVING WINDOW AVERAGE:"
69 for name in sorted(delta.keys()):
70 avg = sum([ s[name] for s in totals]) / (DELAY*len(totals))
71 print "%20s %0.2f per second" % (name, avg)
# Normal (load-generating) mode: argv[1] names the per-client stats file.
stats_out = sys.argv[1]

# One tahoe node URL per line; each operation picks one at random.
server_urls = []
for url in open("server-URLs", "r").readlines():
    url = url.strip()
    if url:
        server_urls.append(url)

# rwcap of the top-level directory, inter-operation delay, and the
# relative read/write frequencies ("R/W", two integers).
root = open("root.cap", "r").read().strip()
delay = float(open("delay", "r").read().strip())
readfreq, writefreq = (
    [int(x) for x in open("operation-mix", "r").read().strip().split("/")])

# Cumulative counters, dumped to stats_out after every operation.
files_uploaded = 0
files_downloaded = 0
bytes_uploaded = 0
bytes_downloaded = 0
directories_read = 0
directories_written = 0
98 def listdir(nodeurl, root, remote_pathname):
99 if nodeurl[-1] != "/":
101 url = nodeurl + "uri/%s/" % urllib.quote(root)
103 url += urllib.quote(remote_pathname)
105 data = urllib.urlopen(url).read()
107 parsed = simplejson.loads(data)
110 print "DATA was", data
113 assert nodetype == "dirnode"
114 global directories_read
115 directories_read += 1
116 children = dict( [(unicode(name),value)
118 in d["children"].iteritems()] )
def choose_random_descendant(server_url, root, pathname=""):
    """Walk downward from the root, picking a random child at each level.

    Recurses through directories and returns the slash-joined path of the
    first file node encountered.
    """
    children = listdir(server_url, root, pathname)
    name = random.choice(children.keys())
    child = children[name]
    if pathname:
        new_pathname = pathname + "/" + name
    else:
        new_pathname = name
    if child[0] == "filenode":
        return new_pathname
    # child is a directory: keep descending
    return choose_random_descendant(server_url, root, new_pathname)
def read_and_discard(nodeurl, root, pathname):
    """GET the file at pathname (relative to the root dircap), read it in
    4KiB chunks, discard the contents, and add the byte count to the
    global bytes_downloaded total."""
    if nodeurl[-1] != "/":
        nodeurl += "/"
    url = nodeurl + "uri/%s/" % urllib.quote(root)
    if pathname:
        url += urllib.quote(pathname)
    f = urllib.urlopen(url)
    global bytes_downloaded
    try:
        while True:
            data = f.read(4096)
            if not data:
                break
            bytes_downloaded += len(data)
    finally:
        # fix: the response object was never closed, leaking a socket
        # per read operation
        f.close()
# Pool of three-segment pathnames; create_random_directory() picks one and
# truncates it to a random depth when choosing a fresh write target.
directories = [
    "dreamland/disengaging/hucksters",
    "dreamland/disengaging/klondikes",
    "dreamland/disengaging/neatly",
    "dreamland/cottages/richmond",
    "dreamland/cottages/perhaps",
    "dreamland/cottages/spies",
    "dreamland/finder/diversion",
    "dreamland/finder/cigarette",
    "dreamland/finder/album",
    "hazing/licences/comedian",
    "hazing/licences/goat",
    "hazing/licences/shopkeeper",
    "hazing/regiment/frigate",
    "hazing/regiment/quackery",
    "hazing/regiment/centerpiece",
    "hazing/disassociate/mob",
    "hazing/disassociate/nihilistic",
    "hazing/disassociate/bilbo",
    ]
def create_random_directory(pool=None):
    """Return a random directory path of 1 to 3 segments.

    Picks an entry from *pool* (default: the module-level `directories`
    list) and truncates it to a random number of leading segments.
    Generalized: the candidate list is now a parameter so alternate pools
    can be supplied; default behavior is unchanged.
    """
    if pool is None:
        pool = directories
    d = random.choice(pool)
    pieces = d.split("/")
    numsegs = random.randint(1, len(pieces))
    return "/".join(pieces[0:numsegs])
def generate_filename():
    """Return a random 8-character hex string to use as a filename."""
    return binascii.hexlify(os.urandom(4))
def choose_filesize(mean=10e3, cap=100e6):
    """Return a random integer file size.

    Drawn from an exponential distribution (mean 10kB by default) and
    capped at 100MB, per the behavior described in the module docstring.
    Generalized: *mean* and *cap* are now parameters with the original
    hard-coded values as defaults, so default behavior is unchanged.
    """
    size = random.expovariate(1.0 / mean)
    return int(min(size, cap))
# copied from twisted/web/client.py
def parse_url(url, defaultPort=None):
    """Split *url* into a (scheme, host, port, path) tuple.

    The port comes from an explicit host:port netloc if present, otherwise
    from *defaultPort*, otherwise 443 for https and 80 for anything else.
    """
    url = url.strip()
    pieces = urlparse.urlparse(url)
    scheme, netloc = pieces[0], pieces[1]
    # everything after the netloc (path, params, query, fragment)
    path = urlparse.urlunparse(('', '') + pieces[2:])
    port = defaultPort
    if port is None:
        if scheme == 'https':
            port = 443
        else:
            port = 80
    host = netloc
    if ':' in netloc:
        host, portstr = netloc.split(':')
        port = int(portstr)
    return scheme, host, port, path
def generate_and_put(nodeurl, root, remote_filename, size):
    """PUT *size* zero bytes to uri/<root>/<remote_filename> on the node.

    Streams the body in 4KiB chunks, adding to the global bytes_uploaded
    counter, and returns the httplib response object.
    """
    if nodeurl[-1] != "/":
        nodeurl += "/"
    url = nodeurl + "uri/%s/" % urllib.quote(root)
    url += urllib.quote(remote_filename)

    scheme, host, port, path = parse_url(url)
    if scheme == "http":
        c = httplib.HTTPConnection(host, port)
    elif scheme == "https":
        c = httplib.HTTPSConnection(host, port)
    else:
        raise ValueError("unknown scheme '%s', need http or https" % scheme)
    c.putrequest("PUT", path)
    # fix: the standard HTTP/1.1 request header is "Host", not "Hostname"
    c.putheader("Host", host)
    c.putheader("User-Agent", "tahoe-check-load")
    c.putheader("Connection", "close")
    c.putheader("Content-Length", "%d" % size)
    c.endheaders()
    global bytes_uploaded
    while size:
        chunksize = min(size, 4096)
        size -= chunksize
        c.send("\x00" * chunksize)
        bytes_uploaded += chunksize
    return c.getresponse()
232 current_writedir = ""
236 if random.uniform(0, readfreq+writefreq) < readfreq:
241 server = random.choice(server_urls)
243 pathname = choose_random_descendant(server, root)
244 print " reading", pathname
245 read_and_discard(server, root, pathname)
246 files_downloaded += 1
248 if random.uniform(0, 100) < 10:
249 current_writedir = create_random_directory()
250 filename = generate_filename()
252 pathname = current_writedir + "/" + filename
255 print " writing", pathname
258 generate_and_put(server, root, pathname, size)
261 f = open(stats_out+".tmp", "w")
262 f.write("files-uploaded: %d\n" % files_uploaded)
263 f.write("files-downloaded: %d\n" % files_downloaded)
264 f.write("bytes-uploaded: %d\n" % bytes_uploaded)
265 f.write("bytes-downloaded: %d\n" % bytes_downloaded)
266 f.write("directories-read: %d\n" % directories_read)
267 f.write("directories-written: %d\n" % directories_written)
269 os.rename(stats_out+".tmp", stats_out)