"""
Ported to Python 3.
"""

import os.path, re
from urllib.parse import quote as url_quote
import json
from io import StringIO

from bs4 import BeautifulSoup

from twisted.web import resource
from allmydata import uri, dirnode
from allmydata.util import base32
from allmydata.util.encodingutil import to_bytes
from allmydata.util.consumer import download_to_data
from allmydata.util.netstring import split_netstring
from allmydata.unknown import UnknownNode
from allmydata.storage.shares import get_share_file
from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
from allmydata.immutable import upload
from allmydata.mutable import publish

from ...web.common import (
    render_exception,
)
from .. import common_util as testutil
from ..common import WebErrorMixin, ShouldFailMixin
from ..no_network import GridTestMixin
from .common import (
    assert_soup_has_favicon,
    unknown_immcap,
    unknown_rocap,
    unknown_rwcap,
)

from ..common import (
    AsyncTestCase,
)

from testtools.matchers import (
    Equals,
    Contains,
    Not,
    HasLength,
    EndsWith,
)

from testtools.twistedsupport import flush_logged_errors


DIR_HTML_TAG = '<html lang="en">'

class CompletelyUnhandledError(Exception):
    pass

class ErrorBoom(resource.Resource, object):
    @render_exception
    def render(self, req):
        raise CompletelyUnhandledError("whoops")

class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, AsyncTestCase):

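    # CHECK is a small helper used by most tests below: it POSTs the given
    # query-string arguments to the file URL previously stashed under `which`
    # and returns the response body decoded as UTF-8 (via GET_unicode).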
    def CHECK(self, ign, which, args, clientnum=0):
        fileurl = self.fileurls[which]
        url = fileurl + "?" + args
        return self.GET_unicode(url, method="POST", clientnum=clientnum)

    def GET_unicode(self, *args, **kwargs):
        """Send an HTTP request, but convert result to Unicode string."""
        d = GridTestMixin.GET(self, *args, **kwargs)
        d.addCallback(str, "utf-8")
        return d

    def test_filecheck(self):
        self.basedir = "web/Grid/filecheck"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = b"data" * 100
        d = c0.upload(upload.Data(DATA, convergence=b""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+b"2", convergence=b"")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], bytes)
        d.addCallback(lambda ign:
                      c0.create_mutable_file(publish.MutableData(DATA+b"3")))
        d.addCallback(_stash_mutable_uri, "corrupt")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(b"literal", convergence=b"")))
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
        d.addCallback(_stash_mutable_uri, "smalldir")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
        d.addCallback(_compute_fileurls)

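        # Damage the uploads so each name matches its label: "good" keeps all
        # 10 shares, "sick" loses one share (leaving 9, still recoverable at
        # 3-of-10), "dead" keeps only one share (unrecoverable), and "corrupt"
        # gets one of its shares deliberately corrupted.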
        def _clobber_shares(ignored):
            good_shares = self.find_uri_shares(self.uris["good"])
            self.failUnlessReallyEqual(len(good_shares), 10)
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_uri_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_uri_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
            corrupt_share(cso)
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", "t=check")
        def _got_html_good(res):
            self.assertThat(res, Contains("Healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
            soup = BeautifulSoup(res, 'html5lib')
            assert_soup_has_favicon(self, soup)

        d.addCallback(_got_html_good)
        d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
        def _got_html_good_return_to(res):
            self.assertThat(res, Contains("Healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
            self.assertThat(res, Contains('<a href="somewhere">Return to file'))
        d.addCallback(_got_html_good_return_to)
        d.addCallback(self.CHECK, "good", "t=check&output=json")
        def _got_json_good(res):
            r = json.loads(res)
            self.failUnlessEqual(r["summary"], "Healthy")
            self.failUnless(r["results"]["healthy"])
            self.assertThat(r["results"], Not(Contains("needs-rebalancing")))
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_good)

        d.addCallback(self.CHECK, "small", "t=check")
        def _got_html_small(res):
            self.assertThat(res, Contains("Literal files are always healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
        d.addCallback(_got_html_small)
        d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
        def _got_html_small_return_to(res):
            self.assertThat(res, Contains("Literal files are always healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
            self.assertThat(res, Contains('<a href="somewhere">Return to file'))
        d.addCallback(_got_html_small_return_to)
        d.addCallback(self.CHECK, "small", "t=check&output=json")
        def _got_json_small(res):
            r = json.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_small)

        d.addCallback(self.CHECK, "smalldir", "t=check")
        def _got_html_smalldir(res):
            self.assertThat(res, Contains("Literal files are always healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
        d.addCallback(_got_html_smalldir)
        d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
        def _got_json_smalldir(res):
            r = json.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_smalldir)

        d.addCallback(self.CHECK, "sick", "t=check")
        def _got_html_sick(res):
            self.assertThat(res, Contains("Not Healthy"))
        d.addCallback(_got_html_sick)
        d.addCallback(self.CHECK, "sick", "t=check&output=json")
        def _got_json_sick(res):
            r = json.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.assertThat(r["results"]["healthy"], Equals(False))
            self.failUnless(r["results"]["recoverable"])
            self.assertThat(r["results"], Not(Contains("needs-rebalancing")))
        d.addCallback(_got_json_sick)

        d.addCallback(self.CHECK, "dead", "t=check")
        def _got_html_dead(res):
            self.assertThat(res, Contains("Not Healthy"))
        d.addCallback(_got_html_dead)
        d.addCallback(self.CHECK, "dead", "t=check&output=json")
        def _got_json_dead(res):
            r = json.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 1 shares (enc 3-of-10)")
            self.assertThat(r["results"]["healthy"], Equals(False))
            self.assertThat(r["results"]["recoverable"], Equals(False))
            self.assertThat(r["results"], Not(Contains("needs-rebalancing")))
        d.addCallback(_got_json_dead)

        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
        def _got_html_corrupt(res):
            self.assertThat(res, Contains("Not Healthy! : Unhealthy"))
        d.addCallback(_got_html_corrupt)
        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
        def _got_json_corrupt(res):
            r = json.loads(res)
            self.assertThat(r["summary"], Contains("Unhealthy: 9 shares (enc 3-of-10)"))
            self.assertThat(r["results"]["healthy"], Equals(False))
            self.failUnless(r["results"]["recoverable"])
            self.assertThat(r["results"], Not(Contains("needs-rebalancing")))
            self.failUnlessReallyEqual(r["results"]["count-happiness"], 9)
            self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
            self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
        d.addCallback(_got_json_corrupt)

        d.addErrback(self.explain_web_error)
        return d

    def test_repair_html(self):
        self.basedir = "web/Grid/repair_html"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = b"data" * 100
        d = c0.upload(upload.Data(DATA, convergence=b""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+b"2", convergence=b"")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], bytes)
        d.addCallback(lambda ign:
                      c0.create_mutable_file(publish.MutableData(DATA+b"3")))
        d.addCallback(_stash_mutable_uri, "corrupt")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
        d.addCallback(_compute_fileurls)

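        # With repair=true, the checker page should say "No repair necessary"
        # for the undamaged file and "Repair successful" for the damaged ones
        # checked below.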
        def _clobber_shares(ignored):
            good_shares = self.find_uri_shares(self.uris["good"])
            self.failUnlessReallyEqual(len(good_shares), 10)
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_uri_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_uri_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
            corrupt_share(cso)
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", "t=check&repair=true")
        def _got_html_good(res):
            self.assertThat(res, Contains("Healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
            self.assertThat(res, Contains("No repair necessary"))
            soup = BeautifulSoup(res, 'html5lib')
            assert_soup_has_favicon(self, soup)

        d.addCallback(_got_html_good)

        d.addCallback(self.CHECK, "sick", "t=check&repair=true")
        def _got_html_sick(res):
            self.assertThat(res, Contains("Healthy : healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
            self.assertThat(res, Contains("Repair successful"))
        d.addCallback(_got_html_sick)

        # repair of a dead file will fail, of course, but it isn't yet
        # clear how this should be reported. Right now it shows up as
        # a "410 Gone".
        #
        #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
        #def _got_html_dead(res):
        #    print(res)
        #    self.failUnlessIn("Healthy : healthy", res)
        #    self.failIfIn("Not Healthy", res)
        #    self.failUnlessIn("No repair necessary", res)
        #d.addCallback(_got_html_dead)

        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
        def _got_html_corrupt(res):
            self.assertThat(res, Contains("Healthy : Healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
            self.assertThat(res, Contains("Repair successful"))
        d.addCallback(_got_html_corrupt)

        d.addErrback(self.explain_web_error)
        return d

    def test_repair_json(self):
        self.basedir = "web/Grid/repair_json"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = b"data" * 100
        d = c0.upload(upload.Data(DATA+b"1", convergence=b""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.get_uri()
        d.addCallback(_stash_uri, "sick")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
        def _got_json_sick(res):
            r = json.loads(res)
            self.failUnlessReallyEqual(r["repair-attempted"], True)
            self.failUnlessReallyEqual(r["repair-successful"], True)
            self.failUnlessEqual(r["pre-repair-results"]["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["pre-repair-results"]["results"]["healthy"])
            self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
            self.failUnless(r["post-repair-results"]["results"]["healthy"])
        d.addCallback(_got_json_sick)

        d.addErrback(self.explain_web_error)
        return d

    def test_unknown(self, immutable=False):
        self.basedir = "web/Grid/unknown"
        if immutable:
            self.basedir = "web/Grid/unknown-immutable"

        self.set_up_grid(oneshare=True)
        c0 = self.g.clients[0]
        self.uris = {}
        self.fileurls = {}

        # the future cap format may contain slashes, which must be tolerated
        expected_info_url = "uri/%s?t=info" % url_quote(unknown_rwcap,
                                                        safe="")

        if immutable:
            name = u"future-imm"
            future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
            d = c0.create_immutable_dirnode({name: (future_node, {})})
        else:
            name = u"future"
            future_node = UnknownNode(unknown_rwcap, unknown_rocap)
            d = c0.create_dirnode()

        def _stash_root_and_create_file(n):
            self.rootnode = n
            self.rooturl = "uri/" + url_quote(n.get_uri())
            self.rourl = "uri/" + url_quote(n.get_readonly_uri())
            if not immutable:
                return self.rootnode.set_node(name, future_node)
        d.addCallback(_stash_root_and_create_file)

        # make sure directory listing tolerates unknown nodes
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_directory_html(res, expected_type_suffix):
            pattern = re.compile(br'<td>\?%s</td>[ \t\n\r]*'
                                 b'<td>%s</td>' % (
                                     expected_type_suffix, name.encode("ascii")),
                                 re.DOTALL)
            self.failUnless(re.search(pattern, res), res)
            # find the More Info link for name, should be relative
            mo = re.search(br'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnlessReallyEqual(info_url, b"%s?t=info" % (name.encode("ascii"),))
        if immutable:
            d.addCallback(_check_directory_html, b"-IMM")
        else:
            d.addCallback(_check_directory_html, b"")

        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_directory_json(res, expect_rw_uri):
            data = json.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            f = data[1]["children"][name]
            self.failUnlessEqual(f[0], "unknown")
            if expect_rw_uri:
                self.failUnlessReallyEqual(to_bytes(f[1]["rw_uri"]), unknown_rwcap, data)
            else:
                self.assertThat(f[1], Not(Contains("rw_uri")))
            if immutable:
                self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_immcap, data)
            else:
                self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_rocap, data)
            self.assertThat(f[1], Contains("metadata"))
        d.addCallback(_check_directory_json, expect_rw_uri=not immutable)

        def _check_info(res, expect_rw_uri, expect_ro_uri):
            if expect_rw_uri:
                self.assertThat(res, Contains(unknown_rwcap))
            if expect_ro_uri:
                if immutable:
                    self.assertThat(res, Contains(unknown_immcap))
                else:
                    self.assertThat(res, Contains(unknown_rocap))
            else:
                self.assertThat(res, Not(Contains(unknown_rocap)))
            res = str(res, "utf-8")
            self.assertThat(res, Contains("Object Type: <span>unknown</span>"))
            self.assertThat(res, Not(Contains("Raw data as")))
            self.assertThat(res, Not(Contains("Directory writecap")))
            self.assertThat(res, Not(Contains("Checker Operations")))
            self.assertThat(res, Not(Contains("Mutable File Operations")))
            self.assertThat(res, Not(Contains("Directory Operations")))

        # FIXME: these should have expect_rw_uri=not immutable; I don't know
        # why they fail. Possibly related to ticket #922.

        d.addCallback(lambda ign: self.GET(expected_info_url))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
        d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, name)))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)

        def _check_json(res, expect_rw_uri):
            data = json.loads(res)
            self.failUnlessEqual(data[0], "unknown")
            if expect_rw_uri:
                self.failUnlessReallyEqual(to_bytes(data[1]["rw_uri"]), unknown_rwcap, data)
            else:
                self.assertThat(data[1], Not(Contains("rw_uri")))

            if immutable:
                self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_immcap, data)
                self.failUnlessReallyEqual(data[1]["mutable"], False)
            elif expect_rw_uri:
                self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_rocap, data)
                self.failUnlessReallyEqual(data[1]["mutable"], True)
            else:
                self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_rocap, data)
                self.assertThat(data[1], Not(Contains("mutable")))

            # TODO: check metadata contents
            self.assertThat(data[1], Contains("metadata"))

        d.addCallback(lambda ign: self.GET("%s/%s?t=json" % (self.rooturl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=not immutable)

        # and make sure that a read-only version of the directory can be
        # rendered too. This version will not have unknown_rwcap, whether
        # or not future_node was immutable.
        d.addCallback(lambda ign: self.GET(self.rourl))
        if immutable:
            d.addCallback(_check_directory_html, b"-IMM")
        else:
            d.addCallback(_check_directory_html, b"-RO")

        d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
        d.addCallback(_check_directory_json, expect_rw_uri=False)

        d.addCallback(lambda ign: self.GET("%s/%s?t=json" % (self.rourl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=False)

        # TODO: check that getting t=info from the Info link in the ro directory
        # works, and does not include the writecap URI.
        return d

    def test_immutable_unknown(self):
        return self.test_unknown(immutable=True)

    def test_mutant_dirnodes_are_omitted(self):
        self.basedir = "web/Grid/mutant_dirnodes_are_omitted"

        self.set_up_grid(oneshare=True)
        c = self.g.clients[0]
        nm = c.nodemaker
        self.uris = {}
        self.fileurls = {}

        lonely_uri = b"URI:LIT:n5xgk" # LIT for "one"
        mut_write_uri = b"URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
        mut_read_uri = b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"

        # This method tests mainly dirnode, but we'd have to duplicate code in order to
        # test the dirnode and web layers separately.

        # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
        # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
        # When the directory is read, the mutants should be silently disposed of, leaving
        # their lonely sibling.
        # We don't test the case of retrieving a cap from the encrypted rw_uri field,
        # because immutable directories don't have a writecap and therefore that field
        # isn't (and can't be) decrypted.
        # TODO: The field still exists in the netstring. Technically we should check what
        # happens if something is put there (_unpack_contents should raise ValueError),
        # but that can wait.

        lonely_child = nm.create_from_cap(lonely_uri)
        mutant_ro_child = nm.create_from_cap(mut_read_uri)
        mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)

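        # Monkey-patch the mutant children so they claim to be allowed in an
        # immutable directory, and make "write-in-ro" report its writecap as
        # its readonly URI; this lets the bogus entries get packed into the
        # directory contents below.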
        def _by_hook_or_by_crook():
            return True
        for n in [mutant_ro_child, mutant_write_in_ro_child]:
            n.is_allowed_in_immutable_directory = _by_hook_or_by_crook

        mutant_write_in_ro_child.get_write_uri = lambda: None
        mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri

        kids = {u"lonely": (lonely_child, {}),
                u"ro": (mutant_ro_child, {}),
                u"write-in-ro": (mutant_write_in_ro_child, {}),
                }
        d = c.create_immutable_dirnode(kids)

        def _created(dn):
            self.failUnless(isinstance(dn, dirnode.DirectoryNode))
            self.assertThat(dn.is_mutable(), Equals(False))
            self.failUnless(dn.is_readonly())
            # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
            self.assertThat(hasattr(dn._node, 'get_writekey'), Equals(False))
            rep = str(dn)
            self.assertThat(rep, Contains("RO-IMM"))
            cap = dn.get_cap()
            self.assertThat(cap.to_string(), Contains(b"CHK"))
            self.cap = cap
            self.rootnode = dn
            self.rooturl = "uri/" + url_quote(dn.get_uri())
            return download_to_data(dn._node)
        d.addCallback(_created)

        def _check_data(data):
            # Decode the netstring representation of the directory to check that all children
            # are present. This is a bit of an abstraction violation, but there's not really
            # any other way to do it given that the real DirectoryNode._unpack_contents would
            # strip the mutant children out (which is what we're trying to test, later).
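            # Layout reminder: each directory entry is a netstring whose
            # payload is itself four netstrings: name, ro_uri, rwcapdata,
            # and metadata.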
            position = 0
            numkids = 0
            while position < len(data):
                entries, position = split_netstring(data, 1, position)
                entry = entries[0]
                (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
                name = name_utf8.decode("utf-8")
                self.failUnlessEqual(rwcapdata, b"")
                self.assertThat(kids, Contains(name))
                (expected_child, ign) = kids[name]
                self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
                numkids += 1

            self.failUnlessReallyEqual(numkids, 3)
            return self.rootnode.list()
        d.addCallback(_check_data)

        # Now when we use the real directory listing code, the mutants should be absent.
        def _check_kids(children):
            self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
            lonely_node, lonely_metadata = children[u"lonely"]

            self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
            self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
        d.addCallback(_check_kids)

        d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
        d.addCallback(lambda n: n.list())
        d.addCallback(_check_kids)  # again with dirnode recreated from cap

        # Make sure the lonely child can be listed in HTML...
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_html(res):
            soup = BeautifulSoup(res, 'html5lib')
            self.assertThat(res, Not(Contains(b"URI:SSK")))
            found = False
            for td in soup.find_all(u"td"):
                if td.text != u"FILE":
                    continue
                a = td.findNextSibling()(u"a")[0]
                self.assertThat(a[u"href"], Contains(url_quote(lonely_uri)))
                self.assertThat(a.text, Equals(u"lonely"))
                self.assertThat(a[u"rel"], Equals([u"noreferrer"]))
                self.assertThat(td.findNextSibling().findNextSibling().text, Equals(u"{}".format(len("one"))))
                found = True
                break
            self.assertThat(found, Equals(True))

            infos = list(
                a[u"href"]
                for a in soup.find_all(u"a")
                if a.text == u"More Info"
            )
            self.assertThat(infos, HasLength(1))
            self.assertThat(infos[0], EndsWith(url_quote(lonely_uri) + "?t=info"))
        d.addCallback(_check_html)

        # ... and in JSON.
        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_json(res):
            data = json.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            listed_children = data[1]["children"]
            self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
            ll_type, ll_data = listed_children[u"lonely"]
            self.failUnlessEqual(ll_type, "filenode")
            self.assertThat(ll_data, Not(Contains("rw_uri")))
            self.failUnlessReallyEqual(to_bytes(ll_data["ro_uri"]), lonely_uri)
        d.addCallback(_check_json)
        return d

    def test_deep_check(self):
        self.basedir = "web/Grid/deep_check"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        self.fileurls = {}
        DATA = b"data" * 100
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.rootnode = n
            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
            return n.add_file(u"good", upload.Data(DATA, convergence=b""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
            return fn
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data(b"literal",
                                                         convergence=b"")))
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+b"1",
                                                         convergence=b"")))
        d.addCallback(_stash_uri, "sick")

        # this tests that deep-check and stream-manifest will ignore
        # UnknownNode instances. Hopefully this will also cover deep-stats.
        future_node = UnknownNode(unknown_rwcap, unknown_rocap)
        d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))

        def _clobber_shares(ignored):
            self.delete_shares_numbered(self.uris["sick"], [0,1])
        d.addCallback(_clobber_shares)

        # root
        # root/good
        # root/small
        # root/sick
        # root/future

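        # t=stream-deep-check emits one JSON line per object visited (the root
        # plus its four children, including the unknown "future" node) followed
        # by a final "stats" unit, hence the 5+1 expected below.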
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _done(res):
            try:
                units = [json.loads(line)
                         for line in res.splitlines()
                         if line]
            except ValueError:
                print("response is:", res)
                raise
            self.failUnlessReallyEqual(len(units), 5+1)
            # should be parent-first
            u0 = units[0]
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessReallyEqual(to_bytes(u0["cap"]), self.rootnode.get_uri())
            u0cr = u0["check-results"]
            self.failUnlessReallyEqual(u0cr["results"]["count-happiness"], 10)
            self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessReallyEqual(to_bytes(ugood["cap"]), self.uris["good"])
            ugoodcr = ugood["check-results"]
            self.failUnlessReallyEqual(ugoodcr["results"]["count-happiness"], 10)
            self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)

            stats = units[-1]
            self.failUnlessEqual(stats["type"], "stats")
            s = stats["stats"]
            self.failUnlessReallyEqual(s["count-immutable-files"], 2)
            self.failUnlessReallyEqual(s["count-literal-files"], 1)
            self.failUnlessReallyEqual(s["count-directories"], 1)
            self.failUnlessReallyEqual(s["count-unknown"], 1)
        d.addCallback(_done)

        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_manifest(res):
            self.failUnless(res.endswith("\n"))
            units = [json.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessReallyEqual(len(units), 5+1)
            self.failUnlessEqual(units[-1]["type"], "stats")
            first = units[0]
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(to_bytes(first["cap"]), self.rootnode.get_uri())
            self.failUnlessEqual(first["type"], "directory")
            stats = units[-1]["stats"]
            self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
            self.failUnlessReallyEqual(stats["count-literal-files"], 1)
            self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
            self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
            self.failUnlessReallyEqual(stats["count-unknown"], 1)
        d.addCallback(_check_manifest)

        # now add root/subdir and root/subdir/grandchild, then make subdir
        # unrecoverable, then see what happens

        d.addCallback(lambda ign:
                      self.rootnode.create_subdirectory(u"subdir"))
        d.addCallback(_stash_uri, "subdir")
        d.addCallback(lambda subdir_node:
                      subdir_node.add_file(u"grandchild",
                                           upload.Data(DATA+b"2",
                                                       convergence=b"")))
        d.addCallback(_stash_uri, "grandchild")

        d.addCallback(lambda ign:
                      self.delete_shares_numbered(self.uris["subdir"],
                                                  list(range(1, 10))))

        # root
        # root/good
        # root/small
        # root/sick
        # root/future
        # root/subdir [unrecoverable]
        # root/subdir/grandchild

        # how should a streaming-JSON API indicate fatal error?
        # answer: emit ERROR: instead of a JSON string

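        # The checks below rely on that convention: every line before the first
        # "ERROR:" line parses as JSON, and the ERROR: line is followed by a
        # plain-text traceback.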
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_broken_manifest(res):
            lines = res.splitlines()
            error_lines = [i
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
            if not error_lines:
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.assertThat(error_line, Contains("ERROR: UnrecoverableFileError(no recoverable versions)"))
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [json.loads(line) for line in lines[:first_error]]
            self.failUnlessReallyEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
        d.addCallback(_check_broken_manifest)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _check_broken_deepcheck(res):
            lines = res.splitlines()
            error_lines = [i
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
            if not error_lines:
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.assertThat(error_line, Contains("ERROR: UnrecoverableFileError(no recoverable versions)"))
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [json.loads(line) for line in lines[:first_error]]
            self.failUnlessReallyEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
            r = last_unit["check-results"]["results"]
            self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
            self.failUnlessReallyEqual(r["count-happiness"], 1)
            self.failUnlessReallyEqual(r["count-shares-good"], 1)
            self.failUnlessReallyEqual(r["recoverable"], False)
        d.addCallback(_check_broken_deepcheck)

        d.addErrback(self.explain_web_error)
        return d

    def test_deep_check_and_repair(self):
        self.basedir = "web/Grid/deep_check_and_repair"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        self.fileurls = {}
        DATA = b"data" * 100
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.rootnode = n
            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
            return n.add_file(u"good", upload.Data(DATA, convergence=b""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data(b"literal",
                                                         convergence=b"")))
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+b"1",
                                                         convergence=b"")))
        d.addCallback(_stash_uri, "sick")
        #d.addCallback(lambda ign:
        #              self.rootnode.add_file(u"dead",
        #                                     upload.Data(DATA+b"2",
        #                                                 convergence=b"")))
        #d.addCallback(_stash_uri, "dead")

        #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
        #d.addCallback(_stash_uri, "corrupt")

        def _clobber_shares(ignored):
            good_shares = self.find_uri_shares(self.uris["good"])
            self.failUnlessReallyEqual(len(good_shares), 10)
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            #dead_shares = self.find_uri_shares(self.uris["dead"])
            #for i in range(1, 10):
            #    os.unlink(dead_shares[i][2])

            #c_shares = self.find_uri_shares(self.uris["corrupt"])
            #cso = CorruptShareOptions()
            #cso.stdout = StringIO()
            #cso.parseOptions([c_shares[0][2]])
            #corrupt_share(cso)
        d.addCallback(_clobber_shares)

        # root
        # root/good CHK, 10 shares
        # root/small LIT
        # root/sick CHK, 9 shares

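        # With repair=true each unit carries "check-and-repair-results"; only
        # the "sick" file should trigger (and succeed at) a repair, bringing it
        # back to 10 good shares.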
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
        def _done(res):
            units = [json.loads(line)
                     for line in res.splitlines()
                     if line]
            self.failUnlessReallyEqual(len(units), 4+1)
            # should be parent-first
            u0 = units[0]
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessReallyEqual(to_bytes(u0["cap"]), self.rootnode.get_uri())
            u0crr = u0["check-and-repair-results"]
            self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
            self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-happiness"], 10)
            self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(to_bytes(ugood["cap"]), self.uris["good"])
            ugoodcrr = ugood["check-and-repair-results"]
            self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
            self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-happiness"], 10)
            self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)

            usick = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"sick"]][0]
            self.failUnlessReallyEqual(to_bytes(usick["cap"]), self.uris["sick"])
            usickcrr = usick["check-and-repair-results"]
            self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
            self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
            self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-happiness"], 9)
            self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
            self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-happiness"], 10)
            self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)

            stats = units[-1]
            self.failUnlessEqual(stats["type"], "stats")
            s = stats["stats"]
            self.failUnlessReallyEqual(s["count-immutable-files"], 2)
            self.failUnlessReallyEqual(s["count-literal-files"], 1)
            self.failUnlessReallyEqual(s["count-directories"], 1)
        d.addCallback(_done)

        d.addErrback(self.explain_web_error)
        return d

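    # Helpers for the lease tests below: _count_leases returns a list of
    # (share filename, lease count) pairs for every share of the given URI,
    # and _assert_leasecount fails unless each share has exactly the expected
    # number of leases.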
    def _count_leases(self, ignored, which):
        u = self.uris[which]
        shares = self.find_uri_shares(u)
        lease_counts = []
        for shnum, serverid, fn in shares:
            sf = get_share_file(fn)
            num_leases = len(list(sf.get_leases()))
            lease_counts.append( (fn, num_leases) )
        return lease_counts

    def _assert_leasecount(self, lease_counts, expected):
        for (fn, num_leases) in lease_counts:
            if num_leases != expected:
                self.fail("expected %d leases, have %d, on %s" %
                          (expected, num_leases, fn))

    def test_add_lease(self):
        self.basedir = "web/Grid/add_lease"
        self.set_up_grid(num_clients=2, oneshare=True)
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = b"data" * 100
        d = c0.upload(upload.Data(DATA, convergence=b""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
        d.addCallback(_stash_uri, "two")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], bytes)
        d.addCallback(lambda ign:
                      c0.create_mutable_file(publish.MutableData(DATA+b"2")))
        d.addCallback(_stash_mutable_uri, "mutable")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "one", "t=check") # no add-lease
        def _got_html_good(res):
            self.assertThat(res, Contains("Healthy"))
            self.assertThat(res, Not(Contains("Not Healthy")))
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses the original client, which uses the same
        # lease-secrets, so it will just renew the original lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses an alternate client, which adds a second lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
                      clientnum=1)
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
        return d

    def test_deep_add_lease(self):
        self.basedir = "web/Grid/deep_add_lease"
        self.set_up_grid(num_clients=2, oneshare=True)
        c0 = self.g.clients[0]
        self.uris = {}
        self.fileurls = {}
        DATA = b"data" * 100
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.rootnode = n
            self.uris["root"] = n.get_uri()
            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
            return n.add_file(u"one", upload.Data(DATA, convergence=b""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data(b"literal",
                                                         convergence=b"")))
        d.addCallback(_stash_uri, "small")

        d.addCallback(lambda ign:
                      c0.create_mutable_file(publish.MutableData(b"mutable")))
        d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
        d.addCallback(_stash_uri, "mutable")

        d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
        def _done(res):
            units = [json.loads(line)
                     for line in res.splitlines()
                     if line]
            # root, one, small, mutable, stats
            self.failUnlessReallyEqual(len(units), 4+1)
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
                      clientnum=1)
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
        return d


    def test_exceptions(self):
        self.basedir = "web/Grid/exceptions"
        self.set_up_grid(num_clients=1, num_servers=2)
        c0 = self.g.clients[0]
        c0.encoding_params['happy'] = 2
        self.fileurls = {}
        DATA = b"data" * 100
        d = c0.create_dirnode()
        def _stash_root(n):
            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
            self.fileurls["imaginary"] = self.fileurls["root"] + "/imaginary"
            return n
        d.addCallback(_stash_root)
        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence=b"")))
        def _stash_bad(ur):
            self.fileurls["1share"] = "uri/" + url_quote(ur.get_uri())
            self.delete_shares_numbered(ur.get_uri(), list(range(1,10)))

            u = uri.from_string(ur.get_uri())
            u.key = testutil.flip_bit(u.key, 0)
            baduri = u.to_string()
            self.fileurls["0shares"] = "uri/" + url_quote(baduri)
        d.addCallback(_stash_bad)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_1share(n):
            u = n.get_uri()
            url = self.fileurls["dir-1share"] = "uri/" + url_quote(u)
            self.fileurls["dir-1share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, list(range(1,10)))
        d.addCallback(_mangle_dirnode_1share)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_0share(n):
            u = n.get_uri()
            url = self.fileurls["dir-0share"] = "uri/" + url_quote(u)
            self.fileurls["dir-0share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, list(range(0,10)))
        d.addCallback(_mangle_dirnode_0share)

1108 | # NotEnoughSharesError should be reported sensibly, with a |
---|
1109 | # text/plain explanation of the problem, and perhaps some |
---|
1110 | # information on which shares *could* be found. |
---|
1111 | |
---|
1112 | d.addCallback(lambda ignored: |
---|
1113 | self.shouldHTTPError("GET unrecoverable", |
---|
1114 | 410, "Gone", "NoSharesError", |
---|
1115 | self.GET, self.fileurls["0shares"])) |
---|
1116 | def _check_zero_shares(body): |
---|
1117 | body = str(body, "utf-8") |
---|
1118 | self.assertThat(body, Not(Contains("<html>"))) |
---|
1119 | body = " ".join(body.strip().split()) |
---|
1120 | exp = ("NoSharesError: no shares could be found. " |
---|
1121 | "Zero shares usually indicates a corrupt URI, or that " |
---|
1122 | "no servers were connected, but it might also indicate " |
---|
1123 | "severe corruption. You should perform a filecheck on " |
---|
1124 | "this object to learn more. The full error message is: " |
---|
1125 | "no shares (need 3). Last failure: None") |
---|
1126 | self.assertEqual(exp, body) |
---|
1127 | d.addCallback(_check_zero_shares) |
---|
1128 | |
---|
1129 | |
---|
1130 | d.addCallback(lambda ignored: |
---|
1131 | self.shouldHTTPError("GET 1share", |
---|
1132 | 410, "Gone", "NotEnoughSharesError", |
---|
1133 | self.GET, self.fileurls["1share"])) |
---|
1134 | def _check_one_share(body): |
---|
1135 | body = str(body, "utf-8") |
---|
1136 | self.assertThat(body, Not(Contains("<html>"))) |
---|
1137 | body = " ".join(body.strip().split()) |
---|
1138 | msgbase = ("NotEnoughSharesError: This indicates that some " |
---|
1139 | "servers were unavailable, or that shares have been " |
---|
1140 | "lost to server departure, hard drive failure, or disk " |
---|
1141 | "corruption. You should perform a filecheck on " |
---|
1142 | "this object to learn more. The full error message is:" |
---|
1143 | ) |
---|
1144 | msg1 = msgbase + (" ran out of shares:" |
---|
1145 | " complete=sh0" |
---|
1146 | " pending=" |
---|
1147 | " overdue= unused= need 3. Last failure: None") |
---|
1148 | msg2 = msgbase + (" ran out of shares:" |
---|
1149 | " complete=" |
---|
1150 | " pending=Share(sh0-on-ysbz4st7)" |
---|
1151 | " overdue= unused= need 3. Last failure: None") |
---|
1152 | self.failUnless(body == msg1 or body == msg2, body) |
---|
1153 | d.addCallback(_check_one_share) |
---|
1154 | |
---|
1155 | d.addCallback(lambda ignored: |
---|
1156 | self.shouldHTTPError("GET imaginary", |
---|
1157 | 404, "Not Found", None, |
---|
1158 | self.GET, self.fileurls["imaginary"])) |
---|
1159 | def _missing_child(body): |
---|
1160 | body = str(body, "utf-8") |
---|
1161 | self.assertThat(body, Contains("No such child: imaginary")) |
---|
1162 | d.addCallback(_missing_child) |
---|
1163 | |
---|
        d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-0share"]))
        def _check_0shares_dir_html(body):
            self.assertThat(body, Contains(DIR_HTML_TAG))
            # we should see the regular page, but without the child table or
            # the dirops forms
            body = " ".join(body.strip().split())
            self.assertThat(body, Contains('href="?t=info">More info on this directory'))
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.assertThat(body, Contains(exp))
            self.assertThat(body, Contains("No upload forms: directory is unreadable"))
        d.addCallback(_check_0shares_dir_html)

        d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-1share"]))
        def _check_1shares_dir_html(body):
            # at some point, we'll split UnrecoverableFileError into 0-shares
            # and some-shares like we did for immutable files (since there
            # are different sorts of advice to offer in each case). For now,
            # they present the same way.
            self.assertThat(body, Contains(DIR_HTML_TAG))
            body = " ".join(body.strip().split())
            self.assertThat(body, Contains('href="?t=info">More info on this directory'))
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.assertThat(body, Contains(exp))
            self.assertThat(body, Contains("No upload forms: directory is unreadable"))
        d.addCallback(_check_1shares_dir_html)

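        # The same failures requested as ?t=json should come back as
        # 410 Gone with the bare UnrecoverableFileError text, not HTML.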
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-0share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.GET,
                                           self.fileurls["dir-0share-json"]))
        def _check_unrecoverable_file(body):
            body = str(body, "utf-8")
            self.assertThat(body, Not(Contains("<html>")))
            body = " ".join(body.strip().split())
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.assertThat(body, Contains(exp))
        d.addCallback(_check_unrecoverable_file)

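        # The 1-share directory is expected to produce the identical JSON
        # error body, so the same checker is reused.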
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-1share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.GET,
                                           self.fileurls["dir-1share-json"]))
        d.addCallback(_check_unrecoverable_file)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))

        # attach a webapi child that throws a random error, to test how it
        # gets rendered.
        w = c0.getServiceNamed("webish")
        w.root.putChild(b"ERRORBOOM", ErrorBoom())

        # "Accept: */*" : should get a text/html stack trace
        # "Accept: text/plain" : should get a text/plain stack trace
        # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
        # no Accept header: should get a text/html stack trace

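        # The four requests below differ only in their Accept header; each
        # should produce a 500 whose body is a traceback rendered per the
        # expectations listed above.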
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": "*/*"}))
        def _internal_error_html1(body):
            body = str(body, "utf-8")
            self.assertThat(body, Contains("<html>"))
        d.addCallback(_internal_error_html1)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": "text/plain"}))
        def _internal_error_text2(body):
            body = str(body, "utf-8")
            self.assertThat(body, Not(Contains("<html>")))
            self.failUnless(body.startswith("Traceback "), body)

        d.addCallback(_internal_error_text2)

        CLI_accepts = "text/plain, application/octet-stream"
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": CLI_accepts}))
        def _internal_error_text3(body):
            body = str(body, "utf-8")
            self.assertThat(body, Not(Contains("<html>")))
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text3)

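        # With no Accept header at all, HTML is the default rendering, so
        # the raw response bytes are checked for an <html> tag.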
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM"))
        def _internal_error_html4(body):
            self.assertThat(body, Contains(b"<html>"))
        d.addCallback(_internal_error_html4)

        def _flush_errors(res):
            # Trial: please ignore the CompletelyUnhandledError in the logs
            flush_logged_errors(CompletelyUnhandledError)
            return res
        d.addBoth(_flush_errors)

        return d

    def test_blacklist(self):
        # download from a blacklisted URI, get an error
        self.basedir = "web/Grid/blacklist"
        self.set_up_grid(oneshare=True)
        c0 = self.g.clients[0]
        fn = c0.config.get_config_path("access.blacklist")
        self.uris = {}
        DATA = b"off-limits " * 50

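        # Upload one immutable file and wrap it in a directory, so both the
        # file URL and the parent directory listing can be exercised against
        # the blacklist.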
        d = c0.upload(upload.Data(DATA, convergence=b""))
        def _stash_uri_and_create_dir(ur):
            self.uri = ur.get_uri()
            self.url = b"uri/"+self.uri
            u = uri.from_string_filenode(self.uri)
            self.si = u.get_storage_index()
            childnode = c0.create_node_from_uri(self.uri, None)
            return c0.create_dirnode({u"blacklisted.txt": (childnode,{}) })
        d.addCallback(_stash_uri_and_create_dir)
        def _stash_dir(node):
            self.dir_node = node
            self.dir_uri = node.get_uri()
            self.dir_url = b"uri/"+self.dir_uri
        d.addCallback(_stash_dir)
        d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
        def _check_dir_html(body):
            self.assertThat(body, Contains(DIR_HTML_TAG))
            self.assertThat(body, Contains("blacklisted.txt</a>"))
        d.addCallback(_check_dir_html)
        d.addCallback(lambda ign: self.GET(self.url))
        d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

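        # As exercised below, access.blacklist accepts comment lines, blank
        # lines, and "<base32 storage index> <reason>" entries, one per line.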
        def _blacklist(ign):
            f = open(fn, "w")
            f.write(" # this is a comment\n")
            f.write(" \n")
            f.write("\n") # also exercise blank lines
            f.write("%s off-limits to you\n" % (str(base32.b2a(self.si), "ascii"),))
            f.close()
            # clients should be checking the blacklist each time, so we don't
            # need to restart the client
        d.addCallback(_blacklist)
        d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_uri",
                                                       403, "Forbidden",
                                                       "Access Prohibited: off-limits",
                                                       self.GET, self.url))

        # We should still be able to list the parent directory, in HTML...
        d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
        def _check_dir_html2(body):
            self.assertThat(body, Contains(DIR_HTML_TAG))
            self.assertThat(body, Contains("blacklisted.txt</strike>"))
        d.addCallback(_check_dir_html2)

        # ... and in JSON (used by CLI).
        d.addCallback(lambda ign: self.GET(self.dir_url+b"?t=json", followRedirect=True))
        def _check_dir_json(res):
            data = json.loads(res)
            self.failUnless(isinstance(data, list), data)
            self.failUnlessEqual(data[0], "dirnode")
            self.failUnless(isinstance(data[1], dict), data)
            self.assertThat(data[1], Contains("children"))
            self.assertThat(data[1]["children"], Contains("blacklisted.txt"))
            childdata = data[1]["children"]["blacklisted.txt"]
            self.failUnless(isinstance(childdata, list), data)
            self.failUnlessEqual(childdata[0], "filenode")
            self.failUnless(isinstance(childdata[1], dict), data)
        d.addCallback(_check_dir_json)

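        # Truncate the blacklist so the file becomes readable again.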
        def _unblacklist(ign):
            open(fn, "w").close()
            # the Blacklist object watches mtime to tell when the file has
            # changed, but on windows this test will run faster than the
            # filesystem's mtime resolution. So we edit Blacklist.last_mtime
            # to force a reload.
            self.g.clients[0].blacklist.last_mtime -= 2.0
        d.addCallback(_unblacklist)

        # now a read should work
        d.addCallback(lambda ign: self.GET(self.url))
        d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

        # read again to exercise the blacklist-is-unchanged logic
        d.addCallback(lambda ign: self.GET(self.url))
        d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

        # now add a blacklisted directory, and make sure files under it are
        # refused too
        def _add_dir(ign):
            childnode = c0.create_node_from_uri(self.uri, None)
            return c0.create_dirnode({u"child": (childnode,{}) })
        d.addCallback(_add_dir)
        def _get_dircap(dn):
            self.dir_si_b32 = base32.b2a(dn.get_storage_index())
            self.dir_url_base = b"uri/"+dn.get_write_uri()
            self.dir_url_json1 = b"uri/"+dn.get_write_uri()+b"?t=json"
            self.dir_url_json2 = b"uri/"+dn.get_write_uri()+b"?t=json"
            self.dir_url_json_ro = b"uri/"+dn.get_readonly_uri()+b"?t=json"
            self.child_url = b"uri/"+dn.get_readonly_uri()+b"/child"
        d.addCallback(_get_dircap)
        d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
        d.addCallback(lambda body: self.assertThat(str(body, "utf-8"), Contains(DIR_HTML_TAG)))
        d.addCallback(lambda ign: self.GET(self.dir_url_json1))
        d.addCallback(lambda res: json.loads(res)) # just check it decodes
        d.addCallback(lambda ign: self.GET(self.dir_url_json2))
        d.addCallback(lambda res: json.loads(res)) # just check it decodes
        d.addCallback(lambda ign: self.GET(self.dir_url_json_ro))
        d.addCallback(lambda res: json.loads(res)) # just check it decodes
        d.addCallback(lambda ign: self.GET(self.child_url))
        d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

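        # Blacklisting the directory's storage index should block the
        # directory itself (HTML and JSON, via both the read-write and
        # read-only caps) and any child reached through it.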
        def _block_dir(ign):
            f = open(fn, "wb")
            f.write(b"%s %s\n" % (self.dir_si_b32, b"dir-off-limits to you"))
            f.close()
            self.g.clients[0].blacklist.last_mtime -= 2.0
        d.addCallback(_block_dir)
        d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir base",
                                                       403, "Forbidden",
                                                       "Access Prohibited: dir-off-limits",
                                                       self.GET, self.dir_url_base))
        d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json1",
                                                       403, "Forbidden",
                                                       "Access Prohibited: dir-off-limits",
                                                       self.GET, self.dir_url_json1))
        d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json2",
                                                       403, "Forbidden",
                                                       "Access Prohibited: dir-off-limits",
                                                       self.GET, self.dir_url_json2))
        d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json_ro",
                                                       403, "Forbidden",
                                                       "Access Prohibited: dir-off-limits",
                                                       self.GET, self.dir_url_json_ro))
        d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir child",
                                                       403, "Forbidden",
                                                       "Access Prohibited: dir-off-limits",
                                                       self.GET, self.child_url))
        return d
