Ticket #1392: test_get_latencies_upgraded.darcs.patch

File test_get_latencies_upgraded.darcs.patch, 13.0 KB (added by arch_o_median, at 2011-04-23T17:27:08Z)
Sat Apr 23 11:21:45 MDT 2011  wilcoxjg@gmail.com
  * test_storage.py:  test_latencies fails because small sample size latencies do not yet correctly report None for ambiguous percentiles.

Sat Apr 23 11:24:55 MDT 2011  wilcoxjg@gmail.com
  * server.py:  get_latencies now correctly returns None as the value associated with small sample size percentiles.  This causes test_latencies in test_storage to pass.

New patches:

[test_storage.py:  test_latencies fails because small sample size latencies do not yet correctly report None for ambiguous percentiles.
wilcoxjg@gmail.com**20110423172145
 Ignore-this: 17ed521a1d4795ec0f9854a64952dcf9
] {
hunk ./src/allmydata/test/test_storage.py 1347
         self.failUnless(abs(output["cancel"]["01_0_percentile"] -  0) < 1, output)
         self.failUnless(abs(output["cancel"]["10_0_percentile"] -  2) < 1, output)
         self.failUnless(abs(output["cancel"]["50_0_percentile"] - 10) < 1, output)
-        self.failUnless(abs(output["cancel"]["90_0_percentile"] - 18) < 1, output)
-        self.failUnless(abs(output["cancel"]["95_0_percentile"] - 18) < 1, output)
-        self.failUnless(abs(output["cancel"]["99_0_percentile"] - 18) < 1, output)
-        self.failUnless(abs(output["cancel"]["99_9_percentile"] - 18) < 1, output)
+        self.failUnless(output["cancel"]["90_0_percentile"] == None, output)
+        self.failUnless(output["cancel"]["95_0_percentile"] == None, output)
+        self.failUnless(output["cancel"]["99_0_percentile"] == None, output)
+        self.failUnless(output["cancel"]["99_9_percentile"] == None, output)
 
         self.failUnlessEqual(len(ss.latencies["get"]), 1)
hunk ./src/allmydata/test/test_storage.py 1353
-        self.failUnless(abs(output["get"]["mean"] - 5) < 1, output)
-        self.failUnless(abs(output["get"]["01_0_percentile"] - 5) < 1, output)
-        self.failUnless(abs(output["get"]["10_0_percentile"] - 5) < 1, output)
-        self.failUnless(abs(output["get"]["50_0_percentile"] - 5) < 1, output)
-        self.failUnless(abs(output["get"]["90_0_percentile"] - 5) < 1, output)
-        self.failUnless(abs(output["get"]["95_0_percentile"] - 5) < 1, output)
-        self.failUnless(abs(output["get"]["99_0_percentile"] - 5) < 1, output)
-        self.failUnless(abs(output["get"]["99_9_percentile"] - 5) < 1, output)
+        self.failUnless(output["get"]["mean"] == None, output)
+        self.failUnless(output["get"]["01_0_percentile"] == None, output)
+        self.failUnless(output["get"]["10_0_percentile"] == None, output)
+        self.failUnless(output["get"]["50_0_percentile"] == None, output)
+        self.failUnless(output["get"]["90_0_percentile"] == None, output)
+        self.failUnless(output["get"]["95_0_percentile"] == None, output)
+        self.failUnless(output["get"]["99_0_percentile"] == None, output)
+        self.failUnless(output["get"]["99_9_percentile"] == None, output)
 
 def remove_tags(s):
     s = re.sub(r'<[^>]*>', ' ', s)
}
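
In other words, the revised assertions above expect roughly the following shape from ss.get_latencies() for the small-sample categories (a sketch only: the approximate values are read directly off the unchanged assertions, and the dict literals are illustrative, not code from the patch):

# "cancel" has only a handful of samples, so the upper percentiles are
# ambiguous and should come back as None, while the lower ones still resolve.
expected_cancel = {
    "01_0_percentile": 0,    # approximately, per abs(... -  0) < 1
    "10_0_percentile": 2,    # approximately
    "50_0_percentile": 10,   # approximately
    "90_0_percentile": None,
    "95_0_percentile": None,
    "99_0_percentile": None,
    "99_9_percentile": None,
}
# "get" has exactly one sample, so the mean and every percentile are None.
expected_get = {
    "mean": None,
    "01_0_percentile": None, "10_0_percentile": None,
    "50_0_percentile": None, "90_0_percentile": None,
    "95_0_percentile": None, "99_0_percentile": None,
    "99_9_percentile": None,
}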
[server.py:  get_latencies now correctly returns None as the value associated with small sample size percentiles.  This causes test_latencies in test_storage to pass.
wilcoxjg@gmail.com**20110423172455
 Ignore-this: a7a36d1bd8afc590440465cf966feaa1
] {
hunk ./src/allmydata/storage/server.py 132
                 continue
             stats = {}
             samples = self.latencies[category][:]
-            samples.sort()
             count = len(samples)
hunk ./src/allmydata/storage/server.py 133
-            stats["mean"] = sum(samples) / count
-            stats["01_0_percentile"] = samples[int(0.01 * count)]
-            stats["10_0_percentile"] = samples[int(0.1 * count)]
-            stats["50_0_percentile"] = samples[int(0.5 * count)]
-            stats["90_0_percentile"] = samples[int(0.9 * count)]
-            stats["95_0_percentile"] = samples[int(0.95 * count)]
-            stats["99_0_percentile"] = samples[int(0.99 * count)]
-            stats["99_9_percentile"] = samples[int(0.999 * count)]
+            stats["samplesize"] = count
+            samples.sort()
+            percentindices = {}
+            orderstatlist = [(0.01, "01_0_percentile"), (0.1, "10_0_percentile"),\
+                             (0.50, "50_0_percentile"), (0.90, "90_0_percentile"),\
+                             (0.95, "95_0_percentile"), (0.99, "99_0_percentile"),\
+                             (0.999, "99_9_percentile")]
+            if count > 1:
+                stats["mean"] = sum(samples) / count
+            else:
+                stats["mean"] = None
+                for percent, percentstring in orderstatlist:
+                    stats[percentstring] = None
+                output[category] = stats
+                continue
+            for percentile, percentilestring in orderstatlist:
+                index = int(percentile * count)
+                if not percentindices.has_key(index):
+                    percentindices[index] = [percentilestring]
+                else:
+                    percentindices[index].append(percentilestring)
+            for ostatindex, listostatstrings in percentindices.iteritems():
+                if len(listostatstrings) > 1:
+                    for percentilestring in listostatstrings:
+                        stats[percentilestring] = None
+                else:
+                    stats[listostatstrings[0]] = samples[ostatindex]
             output[category] = stats
         return output
 
}
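
The patched get_latencies() above maps each requested percentile to the sample index int(percentile * count) and reports None for every percentile whose index collides with another's, i.e. whenever the sample is too small to tell them apart; with a single sample the mean is reported as None as well. A minimal standalone restatement of that rule (a sketch in current Python, not the patched server.py itself: the function name summarize_latencies and the module-level ORDER_STATS list are illustrative, and dict.setdefault stands in for the patch's has_key check):

ORDER_STATS = [(0.01, "01_0_percentile"), (0.1, "10_0_percentile"),
               (0.50, "50_0_percentile"), (0.90, "90_0_percentile"),
               (0.95, "95_0_percentile"), (0.99, "99_0_percentile"),
               (0.999, "99_9_percentile")]

def summarize_latencies(samples):
    """Sketch of the patched per-category logic: mean plus order statistics,
    with None for any percentile that cannot be resolved at this sample size."""
    samples = sorted(samples)
    count = len(samples)
    stats = {"samplesize": count}
    if count <= 1:
        # One sample (or none): the mean and every percentile are ambiguous.
        stats["mean"] = None
        for _, name in ORDER_STATS:
            stats[name] = None
        return stats
    stats["mean"] = sum(samples) / count
    # Group percentile labels by the sample index each one selects.
    by_index = {}
    for fraction, name in ORDER_STATS:
        by_index.setdefault(int(fraction * count), []).append(name)
    for index, names in by_index.items():
        if len(names) > 1:
            # Several percentiles landed on the same sample: all are ambiguous.
            for name in names:
                stats[name] = None
        else:
            stats[names[0]] = samples[index]
    return stats

With a single-sample category this returns None for the mean and every percentile, and with the test's small "cancel" sample set the 90th, 95th, 99th, and 99.9th percentiles all map to the same index and therefore all come back None, matching the revised assertions in the first patch.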

Context:

[Fix a test failure in test_package_initialization on Python 2.4.x due to exceptions being stringified differently than in later versions of Python. refs #1389
david-sarah@jacaranda.org**20110411190738
 Ignore-this: 7847d26bc117c328c679f08a7baee519
]
[tests: add test for including the ImportError message and traceback entry in the summary of errors from importing dependencies. refs #1389
david-sarah@jacaranda.org**20110410155844
 Ignore-this: fbecdbeb0d06a0f875fe8d4030aabafa
]
[allmydata/__init__.py: preserve the message and last traceback entry (file, line number, function, and source line) of ImportErrors in the package versions string. fixes #1389
david-sarah@jacaranda.org**20110410155705
 Ignore-this: 2f87b8b327906cf8bfca9440a0904900
]
[remove unused variable detected by pyflakes
zooko@zooko.com**20110407172231
 Ignore-this: 7344652d5e0720af822070d91f03daf9
]
[allmydata/__init__.py: Nicer reporting of unparseable version numbers in dependencies. fixes #1388
david-sarah@jacaranda.org**20110401202750
 Ignore-this: 9c6bd599259d2405e1caadbb3e0d8c7f
]
[update FTP-and-SFTP.rst: the necessary patch is included in Twisted-10.1
Brian Warner <warner@lothar.com>**20110325232511
 Ignore-this: d5307faa6900f143193bfbe14e0f01a
]
[control.py: remove all uses of s.get_serverid()
warner@lothar.com**20110227011203
 Ignore-this: f80a787953bd7fa3d40e828bde00e855
]
[web: remove some uses of s.get_serverid(), not all
warner@lothar.com**20110227011159
 Ignore-this: a9347d9cf6436537a47edc6efde9f8be
]
[immutable/downloader/fetcher.py: remove all get_serverid() calls
warner@lothar.com**20110227011156
 Ignore-this: fb5ef018ade1749348b546ec24f7f09a
]
[immutable/downloader/fetcher.py: fix diversity bug in server-response handling
warner@lothar.com**20110227011153
 Ignore-this: bcd62232c9159371ae8a16ff63d22c1b
 
 When blocks terminate (either COMPLETE or CORRUPT/DEAD/BADSEGNUM), the
 _shares_from_server dict was being popped incorrectly (using shnum as the
 index instead of serverid). I'm still thinking through the consequences of
 this bug. It was probably benign and really hard to detect. I think it would
 cause us to incorrectly believe that we're pulling too many shares from a
 server, and thus prefer a different server rather than asking for a second
 share from the first server. The diversity code is intended to spread out the
 number of shares simultaneously being requested from each server, but with
 this bug, it might be spreading out the total number of shares requested at
 all, not just simultaneously. (note that SegmentFetcher is scoped to a single
 segment, so the effect doesn't last very long).
]
[immutable/downloader/share.py: reduce get_serverid(), one left, update ext deps
warner@lothar.com**20110227011150
 Ignore-this: d8d56dd8e7b280792b40105e13664554
 
 test_download.py: create+check MyShare instances better, make sure they share
 Server objects, now that finder.py cares
]
[immutable/downloader/finder.py: reduce use of get_serverid(), one left
warner@lothar.com**20110227011146
 Ignore-this: 5785be173b491ae8a78faf5142892020
]
[immutable/offloaded.py: reduce use of get_serverid() a bit more
warner@lothar.com**20110227011142
 Ignore-this: b48acc1b2ae1b311da7f3ba4ffba38f
]
[immutable/upload.py: reduce use of get_serverid()
warner@lothar.com**20110227011138
 Ignore-this: ffdd7ff32bca890782119a6e9f1495f6
]
[immutable/checker.py: remove some uses of s.get_serverid(), not all
warner@lothar.com**20110227011134
 Ignore-this: e480a37efa9e94e8016d826c492f626e
]
[add remaining get_* methods to storage_client.Server, NoNetworkServer, and
warner@lothar.com**20110227011132
 Ignore-this: 6078279ddf42b179996a4b53bee8c421
 MockIServer stubs
]
[upload.py: rearrange _make_trackers a bit, no behavior changes
warner@lothar.com**20110227011128
 Ignore-this: 296d4819e2af452b107177aef6ebb40f
]
[happinessutil.py: finally rename merge_peers to merge_servers
warner@lothar.com**20110227011124
 Ignore-this: c8cd381fea1dd888899cb71e4f86de6e
]
[test_upload.py: factor out FakeServerTracker
warner@lothar.com**20110227011120
 Ignore-this: 6c182cba90e908221099472cc159325b
]
[test_upload.py: server-vs-tracker cleanup
warner@lothar.com**20110227011115
 Ignore-this: 2915133be1a3ba456e8603885437e03
]
[happinessutil.py: server-vs-tracker cleanup
warner@lothar.com**20110227011111
 Ignore-this: b856c84033562d7d718cae7cb01085a9
]
[upload.py: more tracker-vs-server cleanup
warner@lothar.com**20110227011107
 Ignore-this: bb75ed2afef55e47c085b35def2de315
]
[upload.py: fix var names to avoid confusion between 'trackers' and 'servers'
warner@lothar.com**20110227011103
 Ignore-this: 5d5e3415b7d2732d92f42413c25d205d
]
[refactor: s/peer/server/ in immutable/upload, happinessutil.py, test_upload
warner@lothar.com**20110227011100
 Ignore-this: 7ea858755cbe5896ac212a925840fe68
 
 No behavioral changes, just updating variable/method names and log messages.
 The effects outside these three files should be minimal: some exception
 messages changed (to say "server" instead of "peer"), and some internal class
 names were changed. A few things still use "peer" to minimize external
 changes, like UploadResults.timings["peer_selection"] and
 happinessutil.merge_peers, which can be changed later.
]
[storage_client.py: clean up test_add_server/test_add_descriptor, remove .test_servers
warner@lothar.com**20110227011056
 Ignore-this: efad933e78179d3d5fdcd6d1ef2b19cc
]
[test_client.py, upload.py:: remove KiB/MiB/etc constants, and other dead code
warner@lothar.com**20110227011051
 Ignore-this: dc83c5794c2afc4f81e592f689c0dc2d
]
[test: increase timeout on a network test because Francois's ARM machine hit that timeout
zooko@zooko.com**20110317165909
 Ignore-this: 380c345cdcbd196268ca5b65664ac85b
 I'm skeptical that the test was proceeding correctly but ran out of time. It seems more likely that it had gotten hung. But if we raise the timeout to an even more extravagant number then we can be even more certain that the test was never going to finish.
]
[docs/configuration.rst: add a "Frontend Configuration" section
Brian Warner <warner@lothar.com>**20110222014323
 Ignore-this: 657018aa501fe4f0efef9851628444ca
 
 this points to docs/frontends/*.rst, which were previously underlinked
]
[web/filenode.py: avoid calling req.finish() on closed HTTP connections. Closes #1366
"Brian Warner <warner@lothar.com>"**20110221061544
 Ignore-this: 799d4de19933f2309b3c0c19a63bb888
]
[Add unit tests for cross_check_pkg_resources_versus_import, and a regression test for ref #1355. This requires a little refactoring to make it testable.
david-sarah@jacaranda.org**20110221015817
 Ignore-this: 51d181698f8c20d3aca58b057e9c475a
]
[allmydata/__init__.py: .name was used in place of the correct .__name__ when printing an exception. Also, robustify string formatting by using %r instead of %s in some places. fixes #1355.
david-sarah@jacaranda.org**20110221020125
 Ignore-this: b0744ed58f161bf188e037bad077fc48
]
[Refactor StorageFarmBroker handling of servers
Brian Warner <warner@lothar.com>**20110221015804
 Ignore-this: 842144ed92f5717699b8f580eab32a51
 
 Pass around IServer instance instead of (peerid, rref) tuple. Replace
 "descriptor" with "server". Other replacements:
 
  get_all_servers -> get_connected_servers/get_known_servers
  get_servers_for_index -> get_servers_for_psi (now returns IServers)
 
 This change still needs to be pushed further down: lots of code is now
 getting the IServer and then distributing (peerid, rref) internally.
 Instead, it ought to distribute the IServer internally and delay
 extracting a serverid or rref until the last moment.
 
 no_network.py was updated to retain parallelism.
]
[TAG allmydata-tahoe-1.8.2
warner@lothar.com**20110131020101]
Patch bundle hash:
ccf0f0e94a2414ce6d2c7371c35c45a8f6768562