import os.path, re, urllib
import simplejson
from StringIO import StringIO
from twisted.application import service
from twisted.trial import unittest
from twisted.internet import defer, reactor
from twisted.internet.task import Clock
from twisted.web import client, error, http
from twisted.python import failure, log
from nevow import rend
from allmydata import interfaces, uri, webish, dirnode
from allmydata.storage.shares import get_share_file
from allmydata.storage_client import StorageFarmBroker
from allmydata.immutable import upload, download
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
from allmydata.unknown import UnknownNode
from allmydata.web import status, common
from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
from allmydata.util import fileutil, base32
from allmydata.util.consumer import download_to_data
from allmydata.util.netstring import split_netstring
from allmydata.util.encodingutil import to_str
from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
     create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
from allmydata.interfaces import IMutableFileNode
from allmydata.mutable import servermap, publish, retrieve
import allmydata.test.common_util as testutil
from allmydata.test.no_network import GridTestMixin
from allmydata.test.common_web import HTTPClientGETFactory, \
     HTTPClientHEADFactory
from allmydata.client import Client, SecretHolder

# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.

unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')

class FakeStatsProvider:
    def get_stats(self):
        stats = {'stats': {}, 'counters': {}}
        return stats

class FakeNodeMaker(NodeMaker):
    def _create_lit(self, cap):
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        return FakeMutableFileNode(None, None, None, None).init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        n = FakeMutableFileNode(None, None, None, None)
        return n.create(contents)

class FakeUploader(service.Service):
    name = "uploader"
    def upload(self, uploadable, history=None):
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        def _got_data(datav):
            data = "".join(datav)
            n = create_chk_filenode(data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
            return results
        d.addCallback(_got_data)
        return d
    def get_helper_info(self):
        return (None, False)

class FakeHistory:
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]

    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        return []

class FakeClient(Client):
    def __init__(self):
        # don't upcall to Client.__init__, since we only want to initialize a
        # minimal subset
        service.MultiService.__init__(self)
        self.nodeid = "fake_nodeid"
        self.nickname = "fake_nickname"
        self.introducer_furl = "None"
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = SecretHolder("lease secret", "convergence secret")
        self.helper = None
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(None, permute_peers=True)
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.setServiceParent(self)
        self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
                                       self.uploader, None, None,
                                       None, None)

    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)

MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT

class WebMixin(object):
    def setUp(self):
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.clock = Clock()
        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
                                      clock=self.clock)
        self.ws.setServiceParent(self.s)
        self.webish_port = port = self.ws.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port

        l = [ self.s.create_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        def _then(res):
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]

            foo = res[2][1]
            self._foo_node = foo
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri(),
                                     foo.get_readonly_uri())

            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()

            foo.set_uri(u"empty", res[3][1].get_uri(),
                        res[3][1].get_readonly_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri, sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)

            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)

            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)

            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file, baz_file)

            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]

            rodir = res[5][1]
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
                                     rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file, baz_file)

            # public/
            # public/foo/
            # public/foo/bar.txt
            # public/foo/blockingfile
            # public/foo/empty/
            # public/foo/sub/
            # public/foo/sub/baz.txt
            # public/reedownlee/
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"

            return foo.get_metadata_for(u"bar.txt")
        d.addCallback(_then)
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
        return d

    def makefile(self, number):
        contents = "contents of file %s\n" % number
        n = create_chk_filenode(contents)
        return contents, n, n.get_uri()

    def tearDown(self):
        return self.s.stopService()

    def failUnlessIsBarDotTxt(self, res):
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)

    def failUnlessIsBarJSON(self, res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "filenode")
        self.failUnless(isinstance(data[1], dict))
        self.failIf(data[1]["mutable"])
        self.failIf("rw_uri" in data[1]) # immutable
        self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._bar_txt_uri)
        self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._bar_txt_verifycap)
        self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))

    def failUnlessIsFooJSON(self, res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
        self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
        self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)

        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        kids = dict( [(unicode(name),value)
                      for (name,value)
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnlessIn("metadata", kids[u"sub"][1])
        self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
        tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
        self.failUnlessIn("linkcrtime", tahoe_md)
        self.failUnlessIn("linkmotime", tahoe_md)
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
        self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
                                   self._bar_txt_verifycap)
        self.failUnlessIn("metadata", kids[u"bar.txt"][1])
        self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
        self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
                                   self._bar_txt_metadata["tahoe"]["linkcrtime"])
        self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
                                   self._bar_txt_uri)

    def GET(self, urlpath, followRedirect=False, return_response=False,
            **kwargs):
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
        d = factory.deferred
        def _got_data(data):
            return (data, factory.status, factory.response_headers)
        if return_response:
            d.addCallback(_got_data)
        return factory.deferred

    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
        d = factory.deferred
        def _got_data(data):
            return (data, factory.status, factory.response_headers)
        if return_response:
            d.addCallback(_got_data)
        return factory.deferred

    def PUT(self, urlpath, data, **kwargs):
        url = self.webish_url + urlpath
        return client.getPage(url, method="PUT", postdata=data, **kwargs)

    def DELETE(self, urlpath):
        url = self.webish_url + urlpath
        return client.getPage(url, method="DELETE")

    def POST(self, urlpath, followRedirect=False, **fields):
        sepbase = "boogabooga"
        sep = "--" + sepbase
        form = []
        form.append(sep)
        form.append('Content-Disposition: form-data; name="_charset"')
        form.append('')
        form.append('UTF-8')
        form.append(sep)
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
            else:
                form.append('Content-Disposition: form-data; name="%s"' % name)
            form.append('')
            if isinstance(value, unicode):
                value = value.encode("utf-8")
            else:
                value = str(value)
            assert isinstance(value, str)
            form.append(value)
            form.append(sep)
        form[-1] += "--"
        body = ""
        headers = {}
        if fields:
            body = "\r\n".join(form) + "\r\n"
            headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
        return self.POST2(urlpath, body, headers, followRedirect)

    def POST2(self, urlpath, body="", headers={}, followRedirect=False):
        url = self.webish_url + urlpath
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)

    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            if substring:
                self.failUnless(substring in str(res),
                                "substring '%s' not in '%s'"
                                % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
        else:
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))

    def shouldFail2(self, expected_failure, which, substring,
                    response_substring,
                    callable, *args, **kwargs):
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
        def done(res):
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                if substring:
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                if response_substring:
                    self.failUnless(response_substring in res.value.response,
                                    "%s: response substring '%s' not in '%s'"
                                    % (which,
                                       response_substring, res.value.response))
            else:
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
        d.addBoth(done)
        return d

    def should404(self, res, which):
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessReallyEqual(res.value.status, "404")
        else:
            self.fail("%s was supposed to Error(404), not get '%s'" %
                      (which, res))

    def should302(self, res, which):
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessReallyEqual(res.value.status, "302")
        else:
            self.fail("%s was supposed to Error(302), not get '%s'" %
                      (which, res))


class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
    def test_create(self):
        pass

    def test_welcome(self):
        d = self.GET("/")
        def _check(res):
            self.failUnless('Welcome To Tahoe-LAFS' in res, res)

            self.s.basedir = 'web/test_welcome'
            fileutil.make_dirs("web/test_welcome")
            fileutil.make_dirs("web/test_welcome/private")
            return self.GET("/")
        d.addCallback(_check)
        return d

    def test_provisioning(self):
        d = self.GET("/provisioning/")
        def _check(res):
            self.failUnless('Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "num_servers": 30,
                      "ownership_mode": "A",
                      "download_rate": 100,
                      "upload_rate": 10,
                      "delete_rate": 10,
                      "lease_timer": 7,
                      }
            return self.POST("/provisioning/", **fields)

        d.addCallback(_check)
        def _check2(res):
            self.failUnless('Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)

            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "drive_size": 1000,
                      "download_rate": 1000,
                      "upload_rate": 100,
                      "delete_rate": 100,
                      "lease_timer": 7,
                      }
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
        def _check3(res):
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
        def _check4(res):
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
        return d

    def test_reliability_tool(self):
        try:
            from allmydata import reliability
            _hush_pyflakes = reliability
            del _hush_pyflakes
        except:
            raise unittest.SkipTest("reliability tool requires NumPy")

        d = self.GET("/reliability/")
        def _check(res):
            self.failUnless('Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      "k": "3",
                      "R": "7",
                      "N": "10",
                      "delta": "100000",
                      "check_period": "1M",
                      "report_period": "3M",
                      "report_span": "5Y",
                      }
            return self.POST("/reliability/", **fields)

        d.addCallback(_check)
        def _check2(res):
            self.failUnless('Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
        return d

    def test_status(self):
        h = self.s.get_history()
        dl_num = h.list_all_download_statuses()[0].get_counter()
        ul_num = h.list_all_upload_statuses()[0].get_counter()
        mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = h.list_all_publish_statuses()[0].get_counter()
        ret_num = h.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
        def _check(res):
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            #active = data["active"]
            # TODO: test more. We need a way to fake an active operation
            # here.
        d.addCallback(_check_json)

        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
        def _check_dl(res):
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
        def _check_ul(res):
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)

        return d

    def test_status_numbers(self):
        drrm = status.DownloadResultsRendererMixin()
        self.failUnlessReallyEqual(drrm.render_time(None, None), "")
        self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
        self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
        self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
        self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
        self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
        self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
        self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
        self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")

        urrm = status.UploadResultsRendererMixin()
        self.failUnlessReallyEqual(urrm.render_time(None, None), "")
        self.failUnlessReallyEqual(urrm.render_time(None, 2.5), "2.50s")
        self.failUnlessReallyEqual(urrm.render_time(None, 0.25), "250ms")
        self.failUnlessReallyEqual(urrm.render_time(None, 0.0021), "2.1ms")
        self.failUnlessReallyEqual(urrm.render_time(None, 0.000123), "123us")
        self.failUnlessReallyEqual(urrm.render_rate(None, None), "")
        self.failUnlessReallyEqual(urrm.render_rate(None, 2500000), "2.50MBps")
        self.failUnlessReallyEqual(urrm.render_rate(None, 30100), "30.1kBps")
        self.failUnlessReallyEqual(urrm.render_rate(None, 123), "123Bps")

    def test_GET_FILEURL(self):
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        return d

    def test_GET_FILEURL_range(self):
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessReallyEqual(headers["content-range"][0],
                                       "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
        d.addCallback(_got)
        return d

    def test_GET_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessReallyEqual(headers["content-range"][0],
                                       "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
        d.addCallback(_got)
        return d

    def test_GET_FILEURL_partial_end_range(self):
        headers = {"range": "bytes=-5"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessReallyEqual(headers["content-range"][0],
                                       "bytes %d-%d/%d" % (length-5, length-1, length))
            self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
        d.addCallback(_got)
        return d

    def test_GET_FILEURL_partial_range_overrun(self):
        headers = {"range": "bytes=100-200"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
                             "416 Requested Range not satisfiable",
                             "First beyond end of file",
                             self.GET, self.public_url + "/foo/bar.txt",
                             headers=headers)
        return d

    def test_HEAD_FILEURL_range(self):
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(res, "")
            self.failUnlessReallyEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessReallyEqual(headers["content-range"][0],
                                       "bytes 1-10/%d" % len(self.BAR_CONTENTS))
        d.addCallback(_got)
        return d

    def test_HEAD_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessReallyEqual(headers["content-range"][0],
                                       "bytes 5-%d/%d" % (length-1, length))
        d.addCallback(_got)
        return d

    def test_HEAD_FILEURL_partial_end_range(self):
        headers = {"range": "bytes=-5"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessReallyEqual(headers["content-range"][0],
                                       "bytes %d-%d/%d" % (length-5, length-1, length))
        d.addCallback(_got)
        return d

    def test_HEAD_FILEURL_partial_range_overrun(self):
        headers = {"range": "bytes=100-200"}
        d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
                             "416 Requested Range not satisfiable",
                             "",
                             self.HEAD, self.public_url + "/foo/bar.txt",
                             headers=headers)
        return d

    def test_GET_FILEURL_range_bad(self):
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(int(status), 200)
            self.failUnless(not headers.has_key("content-range"))
            self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
        d.addCallback(_got)
        return d

    def test_HEAD_FILEURL(self):
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessReallyEqual(res, "")
            self.failUnlessReallyEqual(headers["content-length"][0],
                                       str(len(self.BAR_CONTENTS)))
            self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
        d.addCallback(_got)
        return d

    def test_GET_FILEURL_named(self):
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        return d

    def test_PUT_FILEURL_named_bad(self):
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             "400 Bad Request",
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")
        return d

    def test_GET_DIRURL_named_bad(self):
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             "400 Bad Request",
                             "is not a file-cap",
                             self.GET, base + "/@@name=/blah.txt")
        return d

    def test_GET_slash_file_bad(self):
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             "404 Not Found",
                             "/file must be followed by a file-cap and a name",
                             self.GET, "/file")
        return d

    def test_GET_unhandled_URI_named(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "400 Bad Request", "is not a file-cap",
                             self.GET, base)
        return d

    def test_GET_unhandled_URI(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             "400 Bad Request",
                             "GET unknown URI type: can only do t=info",
                             self.GET, base)
        return d

    def test_GET_FILE_URI(self):
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base)
        d.addCallback(self.failUnlessIsBarDotTxt)
        return d

    def test_GET_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             self.GET, base)
        return d

    def test_PUT_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             self.PUT, base, "")
        return d

    # TODO: version of this with a Unicode filename
    def test_GET_FILEURL_save(self):
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
                     return_response=True)
        def _got((res, statuscode, headers)):
            content_disposition = headers["content-disposition"][0]
            self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
            self.failUnlessIsBarDotTxt(res)
        d.addCallback(_got)
        return d

    def test_GET_FILEURL_missing(self):
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")
        return d

    def test_PUT_overwrite_only_files(self):
        # create a directory, put a file in that directory.
        contents, n, filecap = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/dir/file1.txt",
                               self.NEWFILE_CONTENTS))
        # try to overwrite the file with replace=only-files
        # (this should work)
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
                               filecap))
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                                       "There was already a child by that name, and you asked me "
                                       "to not replace it",
                                       self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
                                       filecap))
        return d

    def test_PUT_NEWFILEURL(self):
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessReallyEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
        return d

    def test_PUT_NEWFILEURL_not_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessReallyEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
        return d

    def test_PUT_NEWFILEURL_range_bad(self):
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
                             headers=headers)
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))
        return d

    def test_PUT_NEWFILEURL_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessReallyEqual(responsecode, 201)
        def _check_uri(res):
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
            return res
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             u"new.txt",
                                                             self.NEWFILE_CONTENTS))
        return d

    def test_PUT_NEWFILEURL_mutable_toobig(self):
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.PUT,
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))
        return d

    def test_PUT_NEWFILEURL_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessReallyEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
        return d

    def test_PUT_NEWFILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
                             "contents")
        return d

    def test_PUT_NEWFILEURL_no_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  "409 Conflict",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        return d

    def test_PUT_NEWFILEURL_mkdirs(self):
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        fn = self._foo_node
        d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))
        return d

    def test_PUT_NEWFILEURL_blocked(self):
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  "409 Conflict",
                  "Unable to create directory 'blockingfile': a file was in the way")
        return d

    def test_PUT_NEWFILEURL_emptyname(self):
        # an empty pathname component (i.e. a double-slash) is disallowed
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
                             "400 Bad Request",
                             "The webapi does not allow empty pathname components",
                             self.PUT, self.public_url + "/foo//new.txt", "")
        return d

    def test_DELETE_FILEURL(self):
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
        return d

    def test_DELETE_FILEURL_missing(self):
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
        return d

    def test_DELETE_FILEURL_missing2(self):
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
        return d

    def failUnlessHasBarDotTxtMetadata(self, res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessIn("metadata", data[1])
        self.failUnlessIn("tahoe", data[1]["metadata"])
        self.failUnlessIn("linkcrtime", data[1]["metadata"]["tahoe"])
        self.failUnlessIn("linkmotime", data[1]["metadata"]["tahoe"])
        self.failUnlessReallyEqual(data[1]["metadata"]["tahoe"]["linkcrtime"],
                                   self._bar_txt_metadata["tahoe"]["linkcrtime"])

    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path?t=json"
        # instead. This may make it tricky to emulate the S3 interface
        # completely.
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        def _check1(data):
            self.failUnlessIsBarJSON(data)
            self.failUnlessHasBarDotTxtMetadata(data)
            return
        d.addCallback(_check1)
        return d

    def test_GET_FILEURL_json_missing(self):
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
        return d

    def test_GET_FILEURL_uri(self):
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
        def _check(res):
            self.failUnlessReallyEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
        def _check2(res):
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessReallyEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)
        return d

    def test_GET_FILEURL_badtype(self):
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 "bad t=bogus",
                                 self.GET,
                                 self.public_url + "/foo/bar.txt?t=bogus")
        return d

    def test_CSS_FILE(self):
        d = self.GET("/tahoe_css", followRedirect=True)
        def _check(res):
            CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
            self.failUnless(CSS_STYLE.search(res), res)
        d.addCallback(_check)
        return d

    def test_GET_FILEURL_uri_missing(self):
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
        return d

    def test_GET_DIRECTORY_html_banner(self):
        d = self.GET(self.public_url + "/foo", followRedirect=True)
        def _check(res):
            self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>',res)
        d.addCallback(_check)
        return d

    def test_GET_DIRURL(self):
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        ROOT = "../../.."
        d = self.GET(self.public_url + "/foo", followRedirect=True)
        def _check(res):
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
                            in res, res)
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>FILE</td>',
                               r'\s+<td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               r'</td>',
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
                               ])
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                    #                in line, line)
                    break
            else:
                self.fail("unable to find delete-bar.txt line: %s" % (res,))
1038 | |
---|
1039 | # the DIR reference just points to a URI |
---|
1040 | sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri))) |
---|
1041 | get_sub = ((r'<td>DIR</td>') |
---|
1042 | +r'\s+<td><a href="%s">sub</a></td>' % sub_url) |
---|
1043 | self.failUnless(re.search(get_sub, res), res) |
---|
1044 | d.addCallback(_check) |
---|
1045 | |
---|
1046 | # look at a readonly directory |
---|
1047 | d.addCallback(lambda res: |
---|
1048 | self.GET(self.public_url + "/reedownlee", followRedirect=True)) |
---|
1049 | def _check2(res): |
---|
1050 | self.failUnless("(read-only)" in res, res) |
---|
1051 | self.failIf("Upload a file" in res, res) |
---|
1052 | d.addCallback(_check2) |
---|
1053 | |
---|
1054 | # and at a directory that contains a readonly directory |
---|
1055 | d.addCallback(lambda res: |
---|
1056 | self.GET(self.public_url, followRedirect=True)) |
---|
1057 | def _check3(res): |
---|
1058 | self.failUnless(re.search('<td>DIR-RO</td>' |
---|
1059 | r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res) |
---|
1060 | d.addCallback(_check3) |
---|
1061 | |
---|
1062 | # and an empty directory |
---|
1063 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/")) |
---|
1064 | def _check4(res): |
---|
1065 | self.failUnless("directory is empty" in res, res) |
---|
1066 | MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I) |
---|
1067 | self.failUnless(MKDIR_BUTTON_RE.search(res), res) |
---|
1068 | d.addCallback(_check4) |
---|
1069 | |
---|
1070 | # and at a literal directory |
---|
1071 | tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT |
---|
1072 | d.addCallback(lambda res: |
---|
1073 | self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True)) |
---|
1074 | def _check5(res): |
---|
1075 | self.failUnless('(immutable)' in res, res) |
---|
1076 | self.failUnless(re.search('<td>FILE</td>' |
---|
1077 | r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res) |
---|
1078 | d.addCallback(_check5) |
---|
1079 | return d |
---|
1080 | |
---|
1081 | def test_GET_DIRURL_badtype(self): |
---|
1082 | d = self.shouldHTTPError("test_GET_DIRURL_badtype", |
---|
1083 | 400, "Bad Request", |
---|
1084 | "bad t=bogus", |
---|
1085 | self.GET, |
---|
1086 | self.public_url + "/foo?t=bogus") |
---|
1087 | return d |
---|
1088 | |
---|
1089 | def test_GET_DIRURL_json(self): |
---|
1090 | d = self.GET(self.public_url + "/foo?t=json") |
---|
1091 | d.addCallback(self.failUnlessIsFooJSON) |
---|
1092 | return d |
---|
1093 | |
---|
1094 | |
---|
1095 | def test_POST_DIRURL_manifest_no_ophandle(self): |
---|
1096 | d = self.shouldFail2(error.Error, |
---|
1097 | "test_POST_DIRURL_manifest_no_ophandle", |
---|
1098 | "400 Bad Request", |
---|
1099 | "slow operation requires ophandle=", |
---|
1100 | self.POST, self.public_url, t="start-manifest") |
---|
1101 | return d |
---|
1102 | |
---|
1103 | def test_POST_DIRURL_manifest(self): |
---|
1104 | d = defer.succeed(None) |
---|
1105 | def getman(ignored, output): |
---|
1106 | d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125", |
---|
1107 | followRedirect=True) |
---|
1108 | d.addCallback(self.wait_for_operation, "125") |
---|
1109 | d.addCallback(self.get_operation_results, "125", output) |
---|
1110 | return d |
---|
1111 | d.addCallback(getman, None) |
---|
1112 | def _got_html(manifest): |
---|
1113 | self.failUnless("Manifest of SI=" in manifest) |
---|
1114 | self.failUnless("<td>sub</td>" in manifest) |
---|
1115 | self.failUnless(self._sub_uri in manifest) |
---|
1116 | self.failUnless("<td>sub/baz.txt</td>" in manifest) |
---|
1117 | d.addCallback(_got_html) |
---|
1118 | |
---|
1119 | # both t=status and unadorned GET should be identical |
---|
1120 | d.addCallback(lambda res: self.GET("/operations/125")) |
---|
1121 | d.addCallback(_got_html) |
---|
1122 | |
---|
1123 | d.addCallback(getman, "html") |
---|
1124 | d.addCallback(_got_html) |
---|
1125 | d.addCallback(getman, "text") |
---|
1126 | def _got_text(manifest): |
---|
1127 | self.failUnless("\nsub " + self._sub_uri + "\n" in manifest) |
---|
1128 | self.failUnless("\nsub/baz.txt URI:CHK:" in manifest) |
---|
1129 | d.addCallback(_got_text) |
---|
1130 | d.addCallback(getman, "JSON") |
---|
1131 | def _got_json(res): |
---|
1132 | data = res["manifest"] |
---|
1133 | got = {} |
---|
1134 | for (path_list, cap) in data: |
---|
1135 | got[tuple(path_list)] = cap |
---|
1136 | self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri) |
---|
1137 | self.failUnless((u"sub",u"baz.txt") in got) |
---|
1138 | self.failUnless("finished" in res) |
---|
1139 | self.failUnless("origin" in res) |
---|
1140 | self.failUnless("storage-index" in res) |
---|
1141 | self.failUnless("verifycaps" in res) |
---|
1142 | self.failUnless("stats" in res) |
---|
1143 | d.addCallback(_got_json) |
---|
1144 | return d |
---|
1145 | |
---|
1146 | def test_POST_DIRURL_deepsize_no_ophandle(self): |
---|
1147 | d = self.shouldFail2(error.Error, |
---|
1148 | "test_POST_DIRURL_deepsize_no_ophandle", |
---|
1149 | "400 Bad Request", |
---|
1150 | "slow operation requires ophandle=", |
---|
1151 | self.POST, self.public_url, t="start-deep-size") |
---|
1152 | return d |
---|
1153 | |
---|
1154 | def test_POST_DIRURL_deepsize(self): |
---|
1155 | d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126", |
---|
1156 | followRedirect=True) |
---|
1157 | d.addCallback(self.wait_for_operation, "126") |
---|
1158 | d.addCallback(self.get_operation_results, "126", "json") |
---|
1159 | def _got_json(data): |
---|
1160 | self.failUnlessReallyEqual(data["finished"], True) |
---|
1161 | size = data["size"] |
---|
1162 | self.failUnless(size > 1000) |
---|
1163 | d.addCallback(_got_json) |
---|
1164 | d.addCallback(self.get_operation_results, "126", "text") |
---|
1165 | def _got_text(res): |
---|
1166 | mo = re.search(r'^size: (\d+)$', res, re.M) |
---|
1167 | self.failUnless(mo, res) |
---|
1168 | size = int(mo.group(1)) |
---|
1169 | # with directories, the size varies. |
---|
1170 | self.failUnless(size > 1000) |
---|
1171 | d.addCallback(_got_text) |
---|
1172 | return d |
---|
1173 | |
---|
1174 | def test_POST_DIRURL_deepstats_no_ophandle(self): |
---|
1175 | d = self.shouldFail2(error.Error, |
---|
1176 | "test_POST_DIRURL_deepstats_no_ophandle", |
---|
1177 | "400 Bad Request", |
---|
1178 | "slow operation requires ophandle=", |
---|
1179 | self.POST, self.public_url, t="start-deep-stats") |
---|
1180 | return d |
---|
1181 | |
---|
1182 | def test_POST_DIRURL_deepstats(self): |
---|
1183 | d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127", |
---|
1184 | followRedirect=True) |
---|
1185 | d.addCallback(self.wait_for_operation, "127") |
---|
1186 | d.addCallback(self.get_operation_results, "127", "json") |
---|
1187 | def _got_json(stats): |
---|
1188 | expected = {"count-immutable-files": 3, |
---|
1189 | "count-mutable-files": 0, |
---|
1190 | "count-literal-files": 0, |
---|
1191 | "count-files": 3, |
---|
1192 | "count-directories": 3, |
---|
1193 | "size-immutable-files": 57, |
---|
1194 | "size-literal-files": 0, |
---|
1195 | #"size-directories": 1912, # varies |
---|
1196 | #"largest-directory": 1590, |
---|
1197 | "largest-directory-children": 5, |
---|
1198 | "largest-immutable-file": 19, |
---|
1199 | } |
---|
1200 | for k,v in expected.iteritems(): |
---|
1201 | self.failUnlessReallyEqual(stats[k], v, |
---|
1202 | "stats[%s] was %s, not %s" % |
---|
1203 | (k, stats[k], v)) |
---|
1204 | self.failUnlessReallyEqual(stats["size-files-histogram"], |
---|
1205 | [ [11, 31, 3] ]) |
---|
1206 | d.addCallback(_got_json) |
---|
1207 | return d |
---|
1208 | |
---|
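| # (Editor's note, not in the original suite.) The "size-files-histogram" |
---|
| # checked above is a list of [min-size, max-size, count] buckets; |
---|
| # [[11, 31, 3]] says all three immutable files fall in the 11-31 byte |
---|
| # bucket, which is consistent with size-immutable-files=57 and |
---|
| # largest-immutable-file=19. |
---|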
1209 | def test_POST_DIRURL_stream_manifest(self): |
---|
1210 | d = self.POST(self.public_url + "/foo/?t=stream-manifest") |
---|
1211 | def _check(res): |
---|
1212 | self.failUnless(res.endswith("\n")) |
---|
1213 | units = [simplejson.loads(t) for t in res[:-1].split("\n")] |
---|
1214 | self.failUnlessReallyEqual(len(units), 7) |
---|
1215 | self.failUnlessEqual(units[-1]["type"], "stats") |
---|
1216 | first = units[0] |
---|
1217 | self.failUnlessEqual(first["path"], []) |
---|
1218 | self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri) |
---|
1219 | self.failUnlessEqual(first["type"], "directory") |
---|
1220 | baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0] |
---|
1221 | self.failUnlessEqual(baz["path"], ["sub", "baz.txt"]) |
---|
1222 | self.failIfEqual(baz["storage-index"], None) |
---|
1223 | self.failIfEqual(baz["verifycap"], None) |
---|
1224 | self.failIfEqual(baz["repaircap"], None) |
---|
1225 | return |
---|
1226 | d.addCallback(_check) |
---|
1227 | return d |
---|
1228 | |
---|
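| # (Editor's note.) t=stream-manifest, as exercised above, returns one |
---|
| # JSON object per line: one unit per manifest entry plus a final unit of |
---|
| # type "stats", with a trailing newline (hence the res[:-1] before the |
---|
| # split). A standalone consumer could parse the body the same way: |
---|
| #   units = [simplejson.loads(line) for line in body.rstrip("\n").split("\n")] |
---|
| #   stats = [u for u in units if u["type"] == "stats"][0] |
---|
| # where "body" stands for the HTTP response text. |
---|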
1229 | def test_GET_DIRURL_uri(self): |
---|
1230 | d = self.GET(self.public_url + "/foo?t=uri") |
---|
1231 | def _check(res): |
---|
1232 | self.failUnlessReallyEqual(to_str(res), self._foo_uri) |
---|
1233 | d.addCallback(_check) |
---|
1234 | return d |
---|
1235 | |
---|
1236 | def test_GET_DIRURL_readonly_uri(self): |
---|
1237 | d = self.GET(self.public_url + "/foo?t=readonly-uri") |
---|
1238 | def _check(res): |
---|
1239 | self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri) |
---|
1240 | d.addCallback(_check) |
---|
1241 | return d |
---|
1242 | |
---|
1243 | def test_PUT_NEWDIRURL(self): |
---|
1244 | d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "") |
---|
1245 | d.addCallback(lambda res: |
---|
1246 | self.failUnlessNodeHasChild(self._foo_node, u"newdir")) |
---|
1247 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1248 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
1249 | return d |
---|
1250 | |
---|
1251 | def test_POST_NEWDIRURL(self): |
---|
1252 | d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "") |
---|
1253 | d.addCallback(lambda res: |
---|
1254 | self.failUnlessNodeHasChild(self._foo_node, u"newdir")) |
---|
1255 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1256 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
1257 | return d |
---|
1258 | |
---|
1259 | def test_POST_NEWDIRURL_emptyname(self): |
---|
1260 | # an empty pathname component (i.e. a double-slash) is disallowed |
---|
1261 | d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname", |
---|
1262 | "400 Bad Request", |
---|
1263 | "The webapi does not allow empty pathname components, i.e. a double slash", |
---|
1264 | self.POST, self.public_url + "//?t=mkdir") |
---|
1265 | return d |
---|
1266 | |
---|
1267 | def test_POST_NEWDIRURL_initial_children(self): |
---|
1268 | (newkids, caps) = self._create_initial_children() |
---|
1269 | d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children", |
---|
1270 | simplejson.dumps(newkids)) |
---|
1271 | def _check(uri): |
---|
1272 | n = self.s.create_node_from_uri(uri.strip()) |
---|
1273 | d2 = self.failUnlessNodeKeysAre(n, newkids.keys()) |
---|
1274 | d2.addCallback(lambda ign: |
---|
1275 | self.failUnlessROChildURIIs(n, u"child-imm", |
---|
1276 | caps['filecap1'])) |
---|
1277 | d2.addCallback(lambda ign: |
---|
1278 | self.failUnlessRWChildURIIs(n, u"child-mutable", |
---|
1279 | caps['filecap2'])) |
---|
1280 | d2.addCallback(lambda ign: |
---|
1281 | self.failUnlessROChildURIIs(n, u"child-mutable-ro", |
---|
1282 | caps['filecap3'])) |
---|
1283 | d2.addCallback(lambda ign: |
---|
1284 | self.failUnlessROChildURIIs(n, u"unknownchild-ro", |
---|
1285 | caps['unknown_rocap'])) |
---|
1286 | d2.addCallback(lambda ign: |
---|
1287 | self.failUnlessRWChildURIIs(n, u"unknownchild-rw", |
---|
1288 | caps['unknown_rwcap'])) |
---|
1289 | d2.addCallback(lambda ign: |
---|
1290 | self.failUnlessROChildURIIs(n, u"unknownchild-imm", |
---|
1291 | caps['unknown_immcap'])) |
---|
1292 | d2.addCallback(lambda ign: |
---|
1293 | self.failUnlessRWChildURIIs(n, u"dirchild", |
---|
1294 | caps['dircap'])) |
---|
1295 | d2.addCallback(lambda ign: |
---|
1296 | self.failUnlessROChildURIIs(n, u"dirchild-lit", |
---|
1297 | caps['litdircap'])) |
---|
1298 | d2.addCallback(lambda ign: |
---|
1299 | self.failUnlessROChildURIIs(n, u"dirchild-empty", |
---|
1300 | caps['emptydircap'])) |
---|
1301 | return d2 |
---|
1302 | d.addCallback(_check) |
---|
1303 | d.addCallback(lambda res: |
---|
1304 | self.failUnlessNodeHasChild(self._foo_node, u"newdir")) |
---|
1305 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1306 | d.addCallback(self.failUnlessNodeKeysAre, newkids.keys()) |
---|
1307 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1308 | d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1']) |
---|
1309 | return d |
---|
1310 | |
---|
1311 | def test_POST_NEWDIRURL_immutable(self): |
---|
1312 | (newkids, caps) = self._create_immutable_children() |
---|
1313 | d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable", |
---|
1314 | simplejson.dumps(newkids)) |
---|
1315 | def _check(uri): |
---|
1316 | n = self.s.create_node_from_uri(uri.strip()) |
---|
1317 | d2 = self.failUnlessNodeKeysAre(n, newkids.keys()) |
---|
1318 | d2.addCallback(lambda ign: |
---|
1319 | self.failUnlessROChildURIIs(n, u"child-imm", |
---|
1320 | caps['filecap1'])) |
---|
1321 | d2.addCallback(lambda ign: |
---|
1322 | self.failUnlessROChildURIIs(n, u"unknownchild-imm", |
---|
1323 | caps['unknown_immcap'])) |
---|
1324 | d2.addCallback(lambda ign: |
---|
1325 | self.failUnlessROChildURIIs(n, u"dirchild-imm", |
---|
1326 | caps['immdircap'])) |
---|
1327 | d2.addCallback(lambda ign: |
---|
1328 | self.failUnlessROChildURIIs(n, u"dirchild-lit", |
---|
1329 | caps['litdircap'])) |
---|
1330 | d2.addCallback(lambda ign: |
---|
1331 | self.failUnlessROChildURIIs(n, u"dirchild-empty", |
---|
1332 | caps['emptydircap'])) |
---|
1333 | return d2 |
---|
1334 | d.addCallback(_check) |
---|
1335 | d.addCallback(lambda res: |
---|
1336 | self.failUnlessNodeHasChild(self._foo_node, u"newdir")) |
---|
1337 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1338 | d.addCallback(self.failUnlessNodeKeysAre, newkids.keys()) |
---|
1339 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1340 | d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1']) |
---|
1341 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1342 | d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap']) |
---|
1343 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1344 | d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap']) |
---|
1345 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1346 | d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap']) |
---|
1347 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
1348 | d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap']) |
---|
1349 | d.addErrback(self.explain_web_error) |
---|
1350 | return d |
---|
1351 | |
---|
1352 | def test_POST_NEWDIRURL_immutable_bad(self): |
---|
1353 | (newkids, caps) = self._create_initial_children() |
---|
1354 | d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad", |
---|
1355 | "400 Bad Request", |
---|
1356 | "needed to be immutable but was not", |
---|
1357 | self.POST2, |
---|
1358 | self.public_url + "/foo/newdir?t=mkdir-immutable", |
---|
1359 | simplejson.dumps(newkids)) |
---|
1360 | return d |
---|
1361 | |
---|
1362 | def test_PUT_NEWDIRURL_exists(self): |
---|
1363 | d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "") |
---|
1364 | d.addCallback(lambda res: |
---|
1365 | self.failUnlessNodeHasChild(self._foo_node, u"sub")) |
---|
1366 | d.addCallback(lambda res: self._foo_node.get(u"sub")) |
---|
1367 | d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"]) |
---|
1368 | return d |
---|
1369 | |
---|
1370 | def test_PUT_NEWDIRURL_blocked(self): |
---|
1371 | d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked", |
---|
1372 | "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way", |
---|
1373 | self.PUT, |
---|
1374 | self.public_url + "/foo/bar.txt/sub?t=mkdir", "") |
---|
1375 | d.addCallback(lambda res: |
---|
1376 | self.failUnlessNodeHasChild(self._foo_node, u"sub")) |
---|
1377 | d.addCallback(lambda res: self._foo_node.get(u"sub")) |
---|
1378 | d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"]) |
---|
1379 | return d |
---|
1380 | |
---|
1381 | def test_PUT_NEWDIRURL_mkdir_p(self): |
---|
1382 | d = defer.succeed(None) |
---|
1383 | d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp')) |
---|
1384 | d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp")) |
---|
1385 | d.addCallback(lambda res: self._foo_node.get(u"mkp")) |
---|
1386 | def mkdir_p(mkpnode): |
---|
1387 | url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri()) |
---|
1388 | d = self.POST(url) |
---|
1389 | def made_subsub(ssuri): |
---|
1390 | d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2") |
---|
1391 | d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri)) |
---|
1392 | d.addCallback(lambda ign: self.POST(url)) |
---|
1393 | d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri)) |
---|
1394 | return d |
---|
1395 | d.addCallback(made_subsub) |
---|
1396 | return d |
---|
1397 | d.addCallback(mkdir_p) |
---|
1398 | return d |
---|
1399 | |
---|
1400 | def test_PUT_NEWDIRURL_mkdirs(self): |
---|
1401 | d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "") |
---|
1402 | d.addCallback(lambda res: |
---|
1403 | self.failIfNodeHasChild(self._foo_node, u"newdir")) |
---|
1404 | d.addCallback(lambda res: |
---|
1405 | self.failUnlessNodeHasChild(self._foo_node, u"subdir")) |
---|
1406 | d.addCallback(lambda res: |
---|
1407 | self._foo_node.get_child_at_path(u"subdir/newdir")) |
---|
1408 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
1409 | return d |
---|
1410 | |
---|
1411 | def test_DELETE_DIRURL(self): |
---|
1412 | d = self.DELETE(self.public_url + "/foo") |
---|
1413 | d.addCallback(lambda res: |
---|
1414 | self.failIfNodeHasChild(self.public_root, u"foo")) |
---|
1415 | return d |
---|
1416 | |
---|
1417 | def test_DELETE_DIRURL_missing(self): |
---|
1418 | d = self.DELETE(self.public_url + "/foo/missing") |
---|
1419 | d.addBoth(self.should404, "test_DELETE_DIRURL_missing") |
---|
1420 | d.addCallback(lambda res: |
---|
1421 | self.failUnlessNodeHasChild(self.public_root, u"foo")) |
---|
1422 | return d |
---|
1423 | |
---|
1424 | def test_DELETE_DIRURL_missing2(self): |
---|
1425 | d = self.DELETE(self.public_url + "/missing") |
---|
1426 | d.addBoth(self.should404, "test_DELETE_DIRURL_missing2") |
---|
1427 | return d |
---|
1428 | |
---|
1429 | def dump_root(self): |
---|
1430 | print "NODEWALK" |
---|
1431 | w = webish.DirnodeWalkerMixin() |
---|
1432 | def visitor(childpath, childnode, metadata): |
---|
1433 | print childpath |
---|
1434 | d = w.walk(self.public_root, visitor) |
---|
1435 | return d |
---|
1436 | |
---|
1437 | def failUnlessNodeKeysAre(self, node, expected_keys): |
---|
1438 | for k in expected_keys: |
---|
1439 | assert isinstance(k, unicode) |
---|
1440 | d = node.list() |
---|
1441 | def _check(children): |
---|
1442 | self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys)) |
---|
1443 | d.addCallback(_check) |
---|
1444 | return d |
---|
1445 | def failUnlessNodeHasChild(self, node, name): |
---|
1446 | assert isinstance(name, unicode) |
---|
1447 | d = node.list() |
---|
1448 | def _check(children): |
---|
1449 | self.failUnless(name in children) |
---|
1450 | d.addCallback(_check) |
---|
1451 | return d |
---|
1452 | def failIfNodeHasChild(self, node, name): |
---|
1453 | assert isinstance(name, unicode) |
---|
1454 | d = node.list() |
---|
1455 | def _check(children): |
---|
1456 | self.failIf(name in children) |
---|
1457 | d.addCallback(_check) |
---|
1458 | return d |
---|
1459 | |
---|
1460 | def failUnlessChildContentsAre(self, node, name, expected_contents): |
---|
1461 | assert isinstance(name, unicode) |
---|
1462 | d = node.get_child_at_path(name) |
---|
1463 | d.addCallback(lambda node: download_to_data(node)) |
---|
1464 | def _check(contents): |
---|
1465 | self.failUnlessReallyEqual(contents, expected_contents) |
---|
1466 | d.addCallback(_check) |
---|
1467 | return d |
---|
1468 | |
---|
1469 | def failUnlessMutableChildContentsAre(self, node, name, expected_contents): |
---|
1470 | assert isinstance(name, unicode) |
---|
1471 | d = node.get_child_at_path(name) |
---|
1472 | d.addCallback(lambda node: node.download_best_version()) |
---|
1473 | def _check(contents): |
---|
1474 | self.failUnlessReallyEqual(contents, expected_contents) |
---|
1475 | d.addCallback(_check) |
---|
1476 | return d |
---|
1477 | |
---|
1478 | def failUnlessRWChildURIIs(self, node, name, expected_uri): |
---|
1479 | assert isinstance(name, unicode) |
---|
1480 | d = node.get_child_at_path(name) |
---|
1481 | def _check(child): |
---|
1482 | self.failUnless(child.is_unknown() or not child.is_readonly()) |
---|
1483 | self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip()) |
---|
1484 | self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip()) |
---|
1485 | expected_ro_uri = self._make_readonly(expected_uri) |
---|
1486 | if expected_ro_uri: |
---|
1487 | self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip()) |
---|
1488 | d.addCallback(_check) |
---|
1489 | return d |
---|
1490 | |
---|
1491 | def failUnlessROChildURIIs(self, node, name, expected_uri): |
---|
1492 | assert isinstance(name, unicode) |
---|
1493 | d = node.get_child_at_path(name) |
---|
1494 | def _check(child): |
---|
1495 | self.failUnless(child.is_unknown() or child.is_readonly()) |
---|
1496 | self.failUnlessReallyEqual(child.get_write_uri(), None) |
---|
1497 | self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip()) |
---|
1498 | self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip()) |
---|
1499 | d.addCallback(_check) |
---|
1500 | return d |
---|
1501 | |
---|
1502 | def failUnlessURIMatchesRWChild(self, got_uri, node, name): |
---|
1503 | assert isinstance(name, unicode) |
---|
1504 | d = node.get_child_at_path(name) |
---|
1505 | def _check(child): |
---|
1506 | self.failUnless(child.is_unknown() or not child.is_readonly()) |
---|
1507 | self.failUnlessReallyEqual(child.get_uri(), got_uri.strip()) |
---|
1508 | self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip()) |
---|
1509 | expected_ro_uri = self._make_readonly(got_uri) |
---|
1510 | if expected_ro_uri: |
---|
1511 | self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip()) |
---|
1512 | d.addCallback(_check) |
---|
1513 | return d |
---|
1514 | |
---|
1515 | def failUnlessURIMatchesROChild(self, got_uri, node, name): |
---|
1516 | assert isinstance(name, unicode) |
---|
1517 | d = node.get_child_at_path(name) |
---|
1518 | def _check(child): |
---|
1519 | self.failUnless(child.is_unknown() or child.is_readonly()) |
---|
1520 | self.failUnlessReallyEqual(child.get_write_uri(), None) |
---|
1521 | self.failUnlessReallyEqual(got_uri.strip(), child.get_uri()) |
---|
1522 | self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri()) |
---|
1523 | d.addCallback(_check) |
---|
1524 | return d |
---|
1525 | |
---|
1526 | def failUnlessCHKURIHasContents(self, got_uri, contents): |
---|
1527 | self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents) |
---|
1528 | |
---|
1529 | def test_POST_upload(self): |
---|
1530 | d = self.POST(self.public_url + "/foo", t="upload", |
---|
1531 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1532 | fn = self._foo_node |
---|
1533 | d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt") |
---|
1534 | d.addCallback(lambda res: |
---|
1535 | self.failUnlessChildContentsAre(fn, u"new.txt", |
---|
1536 | self.NEWFILE_CONTENTS)) |
---|
1537 | return d |
---|
1538 | |
---|
1539 | def test_POST_upload_unicode(self): |
---|
1540 | filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t |
---|
1541 | d = self.POST(self.public_url + "/foo", t="upload", |
---|
1542 | file=(filename, self.NEWFILE_CONTENTS)) |
---|
1543 | fn = self._foo_node |
---|
1544 | d.addCallback(self.failUnlessURIMatchesROChild, fn, filename) |
---|
1545 | d.addCallback(lambda res: |
---|
1546 | self.failUnlessChildContentsAre(fn, filename, |
---|
1547 | self.NEWFILE_CONTENTS)) |
---|
1548 | target_url = self.public_url + "/foo/" + filename.encode("utf-8") |
---|
1549 | d.addCallback(lambda res: self.GET(target_url)) |
---|
1550 | d.addCallback(lambda contents: self.failUnlessReallyEqual(contents, |
---|
1551 | self.NEWFILE_CONTENTS, |
---|
1552 | contents)) |
---|
1553 | return d |
---|
1554 | |
---|
1555 | def test_POST_upload_unicode_named(self): |
---|
1556 | filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t |
---|
1557 | d = self.POST(self.public_url + "/foo", t="upload", |
---|
1558 | name=filename, |
---|
1559 | file=("overridden", self.NEWFILE_CONTENTS)) |
---|
1560 | fn = self._foo_node |
---|
1561 | d.addCallback(self.failUnlessURIMatchesROChild, fn, filename) |
---|
1562 | d.addCallback(lambda res: |
---|
1563 | self.failUnlessChildContentsAre(fn, filename, |
---|
1564 | self.NEWFILE_CONTENTS)) |
---|
1565 | target_url = self.public_url + "/foo/" + filename.encode("utf-8") |
---|
1566 | d.addCallback(lambda res: self.GET(target_url)) |
---|
1567 | d.addCallback(lambda contents: self.failUnlessReallyEqual(contents, |
---|
1568 | self.NEWFILE_CONTENTS, |
---|
1569 | contents)) |
---|
1570 | return d |
---|
1571 | |
---|
1572 | def test_POST_upload_no_link(self): |
---|
1573 | d = self.POST("/uri", t="upload", |
---|
1574 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1575 | def _check_upload_results(page): |
---|
1576 | # this should be a page which describes the results of the upload |
---|
1577 | # that just finished. |
---|
1578 | self.failUnless("Upload Results:" in page) |
---|
1579 | self.failUnless("URI:" in page) |
---|
1580 | uri_re = re.compile("URI: <tt><span>(.*)</span>") |
---|
1581 | mo = uri_re.search(page) |
---|
1582 | self.failUnless(mo, page) |
---|
1583 | new_uri = mo.group(1) |
---|
1584 | return new_uri |
---|
1585 | d.addCallback(_check_upload_results) |
---|
1586 | d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS) |
---|
1587 | return d |
---|
1588 | |
---|
1589 | def test_POST_upload_no_link_whendone(self): |
---|
1590 | d = self.POST("/uri", t="upload", when_done="/", |
---|
1591 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1592 | d.addBoth(self.shouldRedirect, "/") |
---|
1593 | return d |
---|
1594 | |
---|
1595 | def shouldRedirect2(self, which, checker, callable, *args, **kwargs): |
---|
1596 | d = defer.maybeDeferred(callable, *args, **kwargs) |
---|
1597 | def done(res): |
---|
1598 | if isinstance(res, failure.Failure): |
---|
1599 | res.trap(error.PageRedirect) |
---|
1600 | statuscode = res.value.status |
---|
1601 | target = res.value.location |
---|
1602 | return checker(statuscode, target) |
---|
1603 | self.fail("%s: callable was supposed to redirect, not return '%s'" |
---|
1604 | % (which, res)) |
---|
1605 | d.addBoth(done) |
---|
1606 | return d |
---|
1607 | |
---|
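| # (Editor's note.) shouldRedirect2 depends on the HTTP client raising |
---|
| # twisted.web.error.PageRedirect when a redirect response is received and |
---|
| # redirects are not being followed; the checker is then handed the status |
---|
| # code (as a string) and the Location target, as the next test shows. |
---|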
1608 | def test_POST_upload_no_link_whendone_results(self): |
---|
1609 | def check(statuscode, target): |
---|
1610 | self.failUnlessReallyEqual(statuscode, str(http.FOUND)) |
---|
1611 | self.failUnless(target.startswith(self.webish_url), target) |
---|
1612 | return client.getPage(target, method="GET") |
---|
1613 | d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results", |
---|
1614 | check, |
---|
1615 | self.POST, "/uri", t="upload", |
---|
1616 | when_done="/uri/%(uri)s", |
---|
1617 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1618 | d.addCallback(lambda res: |
---|
1619 | self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS)) |
---|
1620 | return d |
---|
1621 | |
---|
1622 | def test_POST_upload_no_link_mutable(self): |
---|
1623 | d = self.POST("/uri", t="upload", mutable="true", |
---|
1624 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1625 | def _check(filecap): |
---|
1626 | filecap = filecap.strip() |
---|
1627 | self.failUnless(filecap.startswith("URI:SSK:"), filecap) |
---|
1628 | self.filecap = filecap |
---|
1629 | u = uri.WriteableSSKFileURI.init_from_string(filecap) |
---|
1630 | self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents) |
---|
1631 | n = self.s.create_node_from_uri(filecap) |
---|
1632 | return n.download_best_version() |
---|
1633 | d.addCallback(_check) |
---|
1634 | def _check2(data): |
---|
1635 | self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS) |
---|
1636 | return self.GET("/uri/%s" % urllib.quote(self.filecap)) |
---|
1637 | d.addCallback(_check2) |
---|
1638 | def _check3(data): |
---|
1639 | self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS) |
---|
1640 | return self.GET("/file/%s" % urllib.quote(self.filecap)) |
---|
1641 | d.addCallback(_check3) |
---|
1642 | def _check4(data): |
---|
1643 | self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS) |
---|
1644 | d.addCallback(_check4) |
---|
1645 | return d |
---|
1646 | |
---|
1647 | def test_POST_upload_no_link_mutable_toobig(self): |
---|
1648 | d = self.shouldFail2(error.Error, |
---|
1649 | "test_POST_upload_no_link_mutable_toobig", |
---|
1650 | "413 Request Entity Too Large", |
---|
1651 | "SDMF is limited to one segment, and 10001 > 10000", |
---|
1652 | self.POST, |
---|
1653 | "/uri", t="upload", mutable="true", |
---|
1654 | file=("new.txt", |
---|
1655 | "b" * (self.s.MUTABLE_SIZELIMIT+1)) ) |
---|
1656 | return d |
---|
1657 | |
---|
1658 | def test_POST_upload_mutable(self): |
---|
1659 | # this creates a mutable file |
---|
1660 | d = self.POST(self.public_url + "/foo", t="upload", mutable="true", |
---|
1661 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1662 | fn = self._foo_node |
---|
1663 | d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt") |
---|
1664 | d.addCallback(lambda res: |
---|
1665 | self.failUnlessMutableChildContentsAre(fn, u"new.txt", |
---|
1666 | self.NEWFILE_CONTENTS)) |
---|
1667 | d.addCallback(lambda res: self._foo_node.get(u"new.txt")) |
---|
1668 | def _got(newnode): |
---|
1669 | self.failUnless(IMutableFileNode.providedBy(newnode)) |
---|
1670 | self.failUnless(newnode.is_mutable()) |
---|
1671 | self.failIf(newnode.is_readonly()) |
---|
1672 | self._mutable_node = newnode |
---|
1673 | self._mutable_uri = newnode.get_uri() |
---|
1674 | d.addCallback(_got) |
---|
1675 | |
---|
1676 | # now upload it again and make sure that the URI doesn't change |
---|
1677 | NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n" |
---|
1678 | d.addCallback(lambda res: |
---|
1679 | self.POST(self.public_url + "/foo", t="upload", |
---|
1680 | mutable="true", |
---|
1681 | file=("new.txt", NEWER_CONTENTS))) |
---|
1682 | d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt") |
---|
1683 | d.addCallback(lambda res: |
---|
1684 | self.failUnlessMutableChildContentsAre(fn, u"new.txt", |
---|
1685 | NEWER_CONTENTS)) |
---|
1686 | d.addCallback(lambda res: self._foo_node.get(u"new.txt")) |
---|
1687 | def _got2(newnode): |
---|
1688 | self.failUnless(IMutableFileNode.providedBy(newnode)) |
---|
1689 | self.failUnless(newnode.is_mutable()) |
---|
1690 | self.failIf(newnode.is_readonly()) |
---|
1691 | self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri()) |
---|
1692 | d.addCallback(_got2) |
---|
1693 | |
---|
1694 | # upload a second time, using PUT instead of POST |
---|
1695 | NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n" |
---|
1696 | d.addCallback(lambda res: |
---|
1697 | self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS)) |
---|
1698 | d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt") |
---|
1699 | d.addCallback(lambda res: |
---|
1700 | self.failUnlessMutableChildContentsAre(fn, u"new.txt", |
---|
1701 | NEW2_CONTENTS)) |
---|
1702 | |
---|
1703 | # finally list the directory, since mutable files are displayed |
---|
1704 | # slightly differently |
---|
1705 | |
---|
1706 | d.addCallback(lambda res: |
---|
1707 | self.GET(self.public_url + "/foo/", |
---|
1708 | followRedirect=True)) |
---|
1709 | def _check_page(res): |
---|
1710 | # TODO: assert more about the contents |
---|
1711 | self.failUnless("SSK" in res) |
---|
1712 | return res |
---|
1713 | d.addCallback(_check_page) |
---|
1714 | |
---|
1715 | d.addCallback(lambda res: self._foo_node.get(u"new.txt")) |
---|
1716 | def _got3(newnode): |
---|
1717 | self.failUnless(IMutableFileNode.providedBy(newnode)) |
---|
1718 | self.failUnless(newnode.is_mutable()) |
---|
1719 | self.failIf(newnode.is_readonly()) |
---|
1720 | self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri()) |
---|
1721 | d.addCallback(_got3) |
---|
1722 | |
---|
1723 | # look at the JSON form of the enclosing directory |
---|
1724 | d.addCallback(lambda res: |
---|
1725 | self.GET(self.public_url + "/foo/?t=json", |
---|
1726 | followRedirect=True)) |
---|
1727 | def _check_page_json(res): |
---|
1728 | parsed = simplejson.loads(res) |
---|
1729 | self.failUnlessEqual(parsed[0], "dirnode") |
---|
1730 | children = dict( [(unicode(name),value) |
---|
1731 | for (name,value) |
---|
1732 | in parsed[1]["children"].iteritems()] ) |
---|
1733 | self.failUnless(u"new.txt" in children) |
---|
1734 | new_json = children[u"new.txt"] |
---|
1735 | self.failUnlessEqual(new_json[0], "filenode") |
---|
1736 | self.failUnless(new_json[1]["mutable"]) |
---|
1737 | self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri) |
---|
1738 | ro_uri = self._mutable_node.get_readonly().to_string() |
---|
1739 | self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri) |
---|
1740 | d.addCallback(_check_page_json) |
---|
1741 | |
---|
1742 | # and the JSON form of the file |
---|
1743 | d.addCallback(lambda res: |
---|
1744 | self.GET(self.public_url + "/foo/new.txt?t=json")) |
---|
1745 | def _check_file_json(res): |
---|
1746 | parsed = simplejson.loads(res) |
---|
1747 | self.failUnlessEqual(parsed[0], "filenode") |
---|
1748 | self.failUnless(parsed[1]["mutable"]) |
---|
1749 | self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri) |
---|
1750 | ro_uri = self._mutable_node.get_readonly().to_string() |
---|
1751 | self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri) |
---|
1752 | d.addCallback(_check_file_json) |
---|
1753 | |
---|
1754 | # and look at t=uri and t=readonly-uri |
---|
1755 | d.addCallback(lambda res: |
---|
1756 | self.GET(self.public_url + "/foo/new.txt?t=uri")) |
---|
1757 | d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri)) |
---|
1758 | d.addCallback(lambda res: |
---|
1759 | self.GET(self.public_url + "/foo/new.txt?t=readonly-uri")) |
---|
1760 | def _check_ro_uri(res): |
---|
1761 | ro_uri = self._mutable_node.get_readonly().to_string() |
---|
1762 | self.failUnlessReallyEqual(res, ro_uri) |
---|
1763 | d.addCallback(_check_ro_uri) |
---|
1764 | |
---|
1765 | # make sure we can get to it from /uri/URI |
---|
1766 | d.addCallback(lambda res: |
---|
1767 | self.GET("/uri/%s" % urllib.quote(self._mutable_uri))) |
---|
1768 | d.addCallback(lambda res: |
---|
1769 | self.failUnlessReallyEqual(res, NEW2_CONTENTS)) |
---|
1770 | |
---|
1771 | # and that HEAD computes the size correctly |
---|
1772 | d.addCallback(lambda res: |
---|
1773 | self.HEAD(self.public_url + "/foo/new.txt", |
---|
1774 | return_response=True)) |
---|
1775 | def _got_headers((res, status, headers)): |
---|
1776 | self.failUnlessReallyEqual(res, "") |
---|
1777 | self.failUnlessReallyEqual(headers["content-length"][0], |
---|
1778 | str(len(NEW2_CONTENTS))) |
---|
1779 | self.failUnlessReallyEqual(headers["content-type"], ["text/plain"]) |
---|
1780 | d.addCallback(_got_headers) |
---|
1781 | |
---|
1782 | # make sure that size errors are displayed correctly for overwrite |
---|
1783 | d.addCallback(lambda res: |
---|
1784 | self.shouldFail2(error.Error, |
---|
1785 | "test_POST_upload_mutable-toobig", |
---|
1786 | "413 Request Entity Too Large", |
---|
1787 | "SDMF is limited to one segment, and 10001 > 10000", |
---|
1788 | self.POST, |
---|
1789 | self.public_url + "/foo", t="upload", |
---|
1790 | mutable="true", |
---|
1791 | file=("new.txt", |
---|
1792 | "b" * (self.s.MUTABLE_SIZELIMIT+1)), |
---|
1793 | )) |
---|
1794 | |
---|
1795 | d.addErrback(self.dump_error) |
---|
1796 | return d |
---|
1797 | |
---|
1798 | def test_POST_upload_mutable_toobig(self): |
---|
1799 | d = self.shouldFail2(error.Error, |
---|
1800 | "test_POST_upload_mutable_toobig", |
---|
1801 | "413 Request Entity Too Large", |
---|
1802 | "SDMF is limited to one segment, and 10001 > 10000", |
---|
1803 | self.POST, |
---|
1804 | self.public_url + "/foo", |
---|
1805 | t="upload", mutable="true", |
---|
1806 | file=("new.txt", |
---|
1807 | "b" * (self.s.MUTABLE_SIZELIMIT+1)) ) |
---|
1808 | return d |
---|
1809 | |
---|
1810 | def dump_error(self, f): |
---|
1811 | # if the web server returns an error code (like 400 Bad Request), |
---|
1812 | # web.client.getPage puts the HTTP response body into the .response |
---|
1813 | # attribute of the exception object that it gives back. It does not |
---|
1814 | # appear in the Failure's repr(), so the ERROR that trial displays |
---|
1815 | # will be rather terse and unhelpful. Attach this method with addErrback |
---|
1816 | # at the end of your chain to get more information out of these errors. |
---|
1817 | if f.check(error.Error): |
---|
1818 | print "web.error.Error:" |
---|
1819 | print f |
---|
1820 | print f.value.response |
---|
1821 | return f |
---|
1822 | |
---|
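| # (Editor's note.) dump_error is meant to be hung off the end of a test's |
---|
| # Deferred chain, as test_POST_upload_mutable does above: |
---|
| #   d.addErrback(self.dump_error) |
---|
| #   return d |
---|
| # so any error.Error from the webapi is printed together with its response |
---|
| # body before the failure is passed on for trial to report. |
---|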
1823 | def test_POST_upload_replace(self): |
---|
1824 | d = self.POST(self.public_url + "/foo", t="upload", |
---|
1825 | file=("bar.txt", self.NEWFILE_CONTENTS)) |
---|
1826 | fn = self._foo_node |
---|
1827 | d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt") |
---|
1828 | d.addCallback(lambda res: |
---|
1829 | self.failUnlessChildContentsAre(fn, u"bar.txt", |
---|
1830 | self.NEWFILE_CONTENTS)) |
---|
1831 | return d |
---|
1832 | |
---|
1833 | def test_POST_upload_no_replace_ok(self): |
---|
1834 | d = self.POST(self.public_url + "/foo?replace=false", t="upload", |
---|
1835 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1836 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt")) |
---|
1837 | d.addCallback(lambda res: self.failUnlessReallyEqual(res, |
---|
1838 | self.NEWFILE_CONTENTS)) |
---|
1839 | return d |
---|
1840 | |
---|
1841 | def test_POST_upload_no_replace_queryarg(self): |
---|
1842 | d = self.POST(self.public_url + "/foo?replace=false", t="upload", |
---|
1843 | file=("bar.txt", self.NEWFILE_CONTENTS)) |
---|
1844 | d.addBoth(self.shouldFail, error.Error, |
---|
1845 | "POST_upload_no_replace_queryarg", |
---|
1846 | "409 Conflict", |
---|
1847 | "There was already a child by that name, and you asked me " |
---|
1848 | "to not replace it") |
---|
1849 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt")) |
---|
1850 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
1851 | return d |
---|
1852 | |
---|
1853 | def test_POST_upload_no_replace_field(self): |
---|
1854 | d = self.POST(self.public_url + "/foo", t="upload", replace="false", |
---|
1855 | file=("bar.txt", self.NEWFILE_CONTENTS)) |
---|
1856 | d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field", |
---|
1857 | "409 Conflict", |
---|
1858 | "There was already a child by that name, and you asked me " |
---|
1859 | "to not replace it") |
---|
1860 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt")) |
---|
1861 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
1862 | return d |
---|
1863 | |
---|
1864 | def test_POST_upload_whendone(self): |
---|
1865 | d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE", |
---|
1866 | file=("new.txt", self.NEWFILE_CONTENTS)) |
---|
1867 | d.addBoth(self.shouldRedirect, "/THERE") |
---|
1868 | fn = self._foo_node |
---|
1869 | d.addCallback(lambda res: |
---|
1870 | self.failUnlessChildContentsAre(fn, u"new.txt", |
---|
1871 | self.NEWFILE_CONTENTS)) |
---|
1872 | return d |
---|
1873 | |
---|
1874 | def test_POST_upload_named(self): |
---|
1875 | fn = self._foo_node |
---|
1876 | d = self.POST(self.public_url + "/foo", t="upload", |
---|
1877 | name="new.txt", file=self.NEWFILE_CONTENTS) |
---|
1878 | d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt") |
---|
1879 | d.addCallback(lambda res: |
---|
1880 | self.failUnlessChildContentsAre(fn, u"new.txt", |
---|
1881 | self.NEWFILE_CONTENTS)) |
---|
1882 | return d |
---|
1883 | |
---|
1884 | def test_POST_upload_named_badfilename(self): |
---|
1885 | d = self.POST(self.public_url + "/foo", t="upload", |
---|
1886 | name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS) |
---|
1887 | d.addBoth(self.shouldFail, error.Error, |
---|
1888 | "test_POST_upload_named_badfilename", |
---|
1889 | "400 Bad Request", |
---|
1890 | "name= may not contain a slash", |
---|
1891 | ) |
---|
1892 | # make sure that nothing was added |
---|
1893 | d.addCallback(lambda res: |
---|
1894 | self.failUnlessNodeKeysAre(self._foo_node, |
---|
1895 | [u"bar.txt", u"blockingfile", |
---|
1896 | u"empty", u"n\u00fc.txt", |
---|
1897 | u"sub"])) |
---|
1898 | return d |
---|
1899 | |
---|
1900 | def test_POST_FILEURL_check(self): |
---|
1901 | bar_url = self.public_url + "/foo/bar.txt" |
---|
1902 | d = self.POST(bar_url, t="check") |
---|
1903 | def _check(res): |
---|
1904 | self.failUnless("Healthy :" in res) |
---|
1905 | d.addCallback(_check) |
---|
1906 | redir_url = "http://allmydata.org/TARGET" |
---|
1907 | def _check2(statuscode, target): |
---|
1908 | self.failUnlessReallyEqual(statuscode, str(http.FOUND)) |
---|
1909 | self.failUnlessReallyEqual(target, redir_url) |
---|
1910 | d.addCallback(lambda res: |
---|
1911 | self.shouldRedirect2("test_POST_FILEURL_check", |
---|
1912 | _check2, |
---|
1913 | self.POST, bar_url, |
---|
1914 | t="check", |
---|
1915 | when_done=redir_url)) |
---|
1916 | d.addCallback(lambda res: |
---|
1917 | self.POST(bar_url, t="check", return_to=redir_url)) |
---|
1918 | def _check3(res): |
---|
1919 | self.failUnless("Healthy :" in res) |
---|
1920 | self.failUnless("Return to file" in res) |
---|
1921 | self.failUnless(redir_url in res) |
---|
1922 | d.addCallback(_check3) |
---|
1923 | |
---|
1924 | d.addCallback(lambda res: |
---|
1925 | self.POST(bar_url, t="check", output="JSON")) |
---|
1926 | def _check_json(res): |
---|
1927 | data = simplejson.loads(res) |
---|
1928 | self.failUnless("storage-index" in data) |
---|
1929 | self.failUnless(data["results"]["healthy"]) |
---|
1930 | d.addCallback(_check_json) |
---|
1931 | |
---|
1932 | return d |
---|
1933 | |
---|
1934 | def test_POST_FILEURL_check_and_repair(self): |
---|
1935 | bar_url = self.public_url + "/foo/bar.txt" |
---|
1936 | d = self.POST(bar_url, t="check", repair="true") |
---|
1937 | def _check(res): |
---|
1938 | self.failUnless("Healthy :" in res) |
---|
1939 | d.addCallback(_check) |
---|
1940 | redir_url = "http://allmydata.org/TARGET" |
---|
1941 | def _check2(statuscode, target): |
---|
1942 | self.failUnlessReallyEqual(statuscode, str(http.FOUND)) |
---|
1943 | self.failUnlessReallyEqual(target, redir_url) |
---|
1944 | d.addCallback(lambda res: |
---|
1945 | self.shouldRedirect2("test_POST_FILEURL_check_and_repair", |
---|
1946 | _check2, |
---|
1947 | self.POST, bar_url, |
---|
1948 | t="check", repair="true", |
---|
1949 | when_done=redir_url)) |
---|
1950 | d.addCallback(lambda res: |
---|
1951 | self.POST(bar_url, t="check", return_to=redir_url)) |
---|
1952 | def _check3(res): |
---|
1953 | self.failUnless("Healthy :" in res) |
---|
1954 | self.failUnless("Return to file" in res) |
---|
1955 | self.failUnless(redir_url in res) |
---|
1956 | d.addCallback(_check3) |
---|
1957 | return d |
---|
1958 | |
---|
1959 | def test_POST_DIRURL_check(self): |
---|
1960 | foo_url = self.public_url + "/foo/" |
---|
1961 | d = self.POST(foo_url, t="check") |
---|
1962 | def _check(res): |
---|
1963 | self.failUnless("Healthy :" in res, res) |
---|
1964 | d.addCallback(_check) |
---|
1965 | redir_url = "http://allmydata.org/TARGET" |
---|
1966 | def _check2(statuscode, target): |
---|
1967 | self.failUnlessReallyEqual(statuscode, str(http.FOUND)) |
---|
1968 | self.failUnlessReallyEqual(target, redir_url) |
---|
1969 | d.addCallback(lambda res: |
---|
1970 | self.shouldRedirect2("test_POST_DIRURL_check", |
---|
1971 | _check2, |
---|
1972 | self.POST, foo_url, |
---|
1973 | t="check", |
---|
1974 | when_done=redir_url)) |
---|
1975 | d.addCallback(lambda res: |
---|
1976 | self.POST(foo_url, t="check", return_to=redir_url)) |
---|
1977 | def _check3(res): |
---|
1978 | self.failUnless("Healthy :" in res, res) |
---|
1979 | self.failUnless("Return to file/directory" in res) |
---|
1980 | self.failUnless(redir_url in res) |
---|
1981 | d.addCallback(_check3) |
---|
1982 | |
---|
1983 | d.addCallback(lambda res: |
---|
1984 | self.POST(foo_url, t="check", output="JSON")) |
---|
1985 | def _check_json(res): |
---|
1986 | data = simplejson.loads(res) |
---|
1987 | self.failUnless("storage-index" in data) |
---|
1988 | self.failUnless(data["results"]["healthy"]) |
---|
1989 | d.addCallback(_check_json) |
---|
1990 | |
---|
1991 | return d |
---|
1992 | |
---|
1993 | def test_POST_DIRURL_check_and_repair(self): |
---|
1994 | foo_url = self.public_url + "/foo/" |
---|
1995 | d = self.POST(foo_url, t="check", repair="true") |
---|
1996 | def _check(res): |
---|
1997 | self.failUnless("Healthy :" in res, res) |
---|
1998 | d.addCallback(_check) |
---|
1999 | redir_url = "http://allmydata.org/TARGET" |
---|
2000 | def _check2(statuscode, target): |
---|
2001 | self.failUnlessReallyEqual(statuscode, str(http.FOUND)) |
---|
2002 | self.failUnlessReallyEqual(target, redir_url) |
---|
2003 | d.addCallback(lambda res: |
---|
2004 | self.shouldRedirect2("test_POST_DIRURL_check_and_repair", |
---|
2005 | _check2, |
---|
2006 | self.POST, foo_url, |
---|
2007 | t="check", repair="true", |
---|
2008 | when_done=redir_url)) |
---|
2009 | d.addCallback(lambda res: |
---|
2010 | self.POST(foo_url, t="check", return_to=redir_url)) |
---|
2011 | def _check3(res): |
---|
2012 | self.failUnless("Healthy :" in res) |
---|
2013 | self.failUnless("Return to file/directory" in res) |
---|
2014 | self.failUnless(redir_url in res) |
---|
2015 | d.addCallback(_check3) |
---|
2016 | return d |
---|
2017 | |
---|
2018 | def wait_for_operation(self, ignored, ophandle): |
---|
2019 | url = "/operations/" + ophandle |
---|
2020 | url += "?t=status&output=JSON" |
---|
2021 | d = self.GET(url) |
---|
2022 | def _got(res): |
---|
2023 | data = simplejson.loads(res) |
---|
2024 | if not data["finished"]: |
---|
2025 | d = self.stall(delay=1.0) |
---|
2026 | d.addCallback(self.wait_for_operation, ophandle) |
---|
2027 | return d |
---|
2028 | return data |
---|
2029 | d.addCallback(_got) |
---|
2030 | return d |
---|
2031 | |
---|
2032 | def get_operation_results(self, ignored, ophandle, output=None): |
---|
2033 | url = "/operations/" + ophandle |
---|
2034 | url += "?t=status" |
---|
2035 | if output: |
---|
2036 | url += "&output=" + output |
---|
2037 | d = self.GET(url) |
---|
2038 | def _got(res): |
---|
2039 | if output and output.lower() == "json": |
---|
2040 | return simplejson.loads(res) |
---|
2041 | return res |
---|
2042 | d.addCallback(_got) |
---|
2043 | return d |
---|
2044 | |
---|
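| # (Editor's sketch, assuming the same start-deep-check behaviour that the |
---|
| # tests below exercise.) The two helpers above implement the webapi's |
---|
| # slow-operation protocol: start the operation with ?ophandle=HANDLE, then |
---|
| # poll /operations/HANDLE?t=status&output=JSON until "finished" is true. |
---|
| # With a hypothetical handle "999" that would look like: |
---|
| #   d = self.POST(self.public_url + "/foo/?t=start-deep-check&ophandle=999", |
---|
| #                 followRedirect=True) |
---|
| #   d.addCallback(self.wait_for_operation, "999") |
---|
| #   d.addCallback(lambda data: self.failUnlessReallyEqual(data["finished"], True)) |
---|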
2045 | def test_POST_DIRURL_deepcheck_no_ophandle(self): |
---|
2046 | d = self.shouldFail2(error.Error, |
---|
2047 | "test_POST_DIRURL_deepcheck_no_ophandle", |
---|
2048 | "400 Bad Request", |
---|
2049 | "slow operation requires ophandle=", |
---|
2050 | self.POST, self.public_url, t="start-deep-check") |
---|
2051 | return d |
---|
2052 | |
---|
2053 | def test_POST_DIRURL_deepcheck(self): |
---|
2054 | def _check_redirect(statuscode, target): |
---|
2055 | self.failUnlessReallyEqual(statuscode, str(http.FOUND)) |
---|
2056 | self.failUnless(target.endswith("/operations/123")) |
---|
2057 | d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect, |
---|
2058 | self.POST, self.public_url, |
---|
2059 | t="start-deep-check", ophandle="123") |
---|
2060 | d.addCallback(self.wait_for_operation, "123") |
---|
2061 | def _check_json(data): |
---|
2062 | self.failUnlessReallyEqual(data["finished"], True) |
---|
2063 | self.failUnlessReallyEqual(data["count-objects-checked"], 8) |
---|
2064 | self.failUnlessReallyEqual(data["count-objects-healthy"], 8) |
---|
2065 | d.addCallback(_check_json) |
---|
2066 | d.addCallback(self.get_operation_results, "123", "html") |
---|
2067 | def _check_html(res): |
---|
2068 | self.failUnless("Objects Checked: <span>8</span>" in res) |
---|
2069 | self.failUnless("Objects Healthy: <span>8</span>" in res) |
---|
2070 | d.addCallback(_check_html) |
---|
2071 | |
---|
2072 | d.addCallback(lambda res: |
---|
2073 | self.GET("/operations/123/")) |
---|
2074 | d.addCallback(_check_html) # should be the same as without the slash |
---|
2075 | |
---|
2076 | d.addCallback(lambda res: |
---|
2077 | self.shouldFail2(error.Error, "one", "404 Not Found", |
---|
2078 | "No detailed results for SI bogus", |
---|
2079 | self.GET, "/operations/123/bogus")) |
---|
2080 | |
---|
2081 | foo_si = self._foo_node.get_storage_index() |
---|
2082 | foo_si_s = base32.b2a(foo_si) |
---|
2083 | d.addCallback(lambda res: |
---|
2084 | self.GET("/operations/123/%s?output=JSON" % foo_si_s)) |
---|
2085 | def _check_foo_json(res): |
---|
2086 | data = simplejson.loads(res) |
---|
2087 | self.failUnlessEqual(data["storage-index"], foo_si_s) |
---|
2088 | self.failUnless(data["results"]["healthy"]) |
---|
2089 | d.addCallback(_check_foo_json) |
---|
2090 | return d |
---|
2091 | |
---|
2092 | def test_POST_DIRURL_deepcheck_and_repair(self): |
---|
2093 | d = self.POST(self.public_url, t="start-deep-check", repair="true", |
---|
2094 | ophandle="124", output="json", followRedirect=True) |
---|
2095 | d.addCallback(self.wait_for_operation, "124") |
---|
2096 | def _check_json(data): |
---|
2097 | self.failUnlessReallyEqual(data["finished"], True) |
---|
2098 | self.failUnlessReallyEqual(data["count-objects-checked"], 8) |
---|
2099 | self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 8) |
---|
2100 | self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0) |
---|
2101 | self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0) |
---|
2102 | self.failUnlessReallyEqual(data["count-repairs-attempted"], 0) |
---|
2103 | self.failUnlessReallyEqual(data["count-repairs-successful"], 0) |
---|
2104 | self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0) |
---|
2105 | self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 8) |
---|
2106 | self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0) |
---|
2107 | self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0) |
---|
2108 | d.addCallback(_check_json) |
---|
2109 | d.addCallback(self.get_operation_results, "124", "html") |
---|
2110 | def _check_html(res): |
---|
2111 | self.failUnless("Objects Checked: <span>8</span>" in res) |
---|
2112 | |
---|
2113 | self.failUnless("Objects Healthy (before repair): <span>8</span>" in res) |
---|
2114 | self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res) |
---|
2115 | self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res) |
---|
2116 | |
---|
2117 | self.failUnless("Repairs Attempted: <span>0</span>" in res) |
---|
2118 | self.failUnless("Repairs Successful: <span>0</span>" in res) |
---|
2119 | self.failUnless("Repairs Unsuccessful: <span>0</span>" in res) |
---|
2120 | |
---|
2121 | self.failUnless("Objects Healthy (after repair): <span>8</span>" in res) |
---|
2122 | self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res) |
---|
2123 | self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res) |
---|
2124 | d.addCallback(_check_html) |
---|
2125 | return d |
---|
2126 | |
---|
2127 | def test_POST_FILEURL_bad_t(self): |
---|
2128 | d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request", |
---|
2129 | "POST to file: bad t=bogus", |
---|
2130 | self.POST, self.public_url + "/foo/bar.txt", |
---|
2131 | t="bogus") |
---|
2132 | return d |
---|
2133 | |
---|
2134 | def test_POST_mkdir(self): # return value? |
---|
2135 | d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir") |
---|
2136 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2137 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
2138 | return d |
---|
2139 | |
---|
2140 | def test_POST_mkdir_initial_children(self): |
---|
2141 | (newkids, caps) = self._create_initial_children() |
---|
2142 | d = self.POST2(self.public_url + |
---|
2143 | "/foo?t=mkdir-with-children&name=newdir", |
---|
2144 | simplejson.dumps(newkids)) |
---|
2145 | d.addCallback(lambda res: |
---|
2146 | self.failUnlessNodeHasChild(self._foo_node, u"newdir")) |
---|
2147 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2148 | d.addCallback(self.failUnlessNodeKeysAre, newkids.keys()) |
---|
2149 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2150 | d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1']) |
---|
2151 | return d |
---|
2152 | |
---|
2153 | def test_POST_mkdir_immutable(self): |
---|
2154 | (newkids, caps) = self._create_immutable_children() |
---|
2155 | d = self.POST2(self.public_url + |
---|
2156 | "/foo?t=mkdir-immutable&name=newdir", |
---|
2157 | simplejson.dumps(newkids)) |
---|
2158 | d.addCallback(lambda res: |
---|
2159 | self.failUnlessNodeHasChild(self._foo_node, u"newdir")) |
---|
2160 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2161 | d.addCallback(self.failUnlessNodeKeysAre, newkids.keys()) |
---|
2162 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2163 | d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1']) |
---|
2164 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2165 | d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap']) |
---|
2166 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2167 | d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap']) |
---|
2168 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2169 | d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap']) |
---|
2170 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2171 | d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap']) |
---|
2172 | return d |
---|
2173 | |
---|
2174 | def test_POST_mkdir_immutable_bad(self): |
---|
2175 | (newkids, caps) = self._create_initial_children() |
---|
2176 | d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad", |
---|
2177 | "400 Bad Request", |
---|
2178 | "needed to be immutable but was not", |
---|
2179 | self.POST2, |
---|
2180 | self.public_url + |
---|
2181 | "/foo?t=mkdir-immutable&name=newdir", |
---|
2182 | simplejson.dumps(newkids)) |
---|
2183 | return d |
---|
2184 | |
---|
2185 | def test_POST_mkdir_2(self): |
---|
2186 | d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "") |
---|
2187 | d.addCallback(lambda res: |
---|
2188 | self.failUnlessNodeHasChild(self._foo_node, u"newdir")) |
---|
2189 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2190 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
2191 | return d |
---|
2192 | |
---|
2193 | def test_POST_mkdirs_2(self): |
---|
2194 | d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "") |
---|
2195 | d.addCallback(lambda res: |
---|
2196 | self.failUnlessNodeHasChild(self._foo_node, u"bardir")) |
---|
2197 | d.addCallback(lambda res: self._foo_node.get(u"bardir")) |
---|
2198 | d.addCallback(lambda bardirnode: bardirnode.get(u"newdir")) |
---|
2199 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
2200 | return d |
---|
2201 | |
---|
2202 | def test_POST_mkdir_no_parentdir_noredirect(self): |
---|
2203 | d = self.POST("/uri?t=mkdir") |
---|
2204 | def _after_mkdir(res): |
---|
2205 | uri.DirectoryURI.init_from_string(res) |
---|
2206 | d.addCallback(_after_mkdir) |
---|
2207 | return d |
---|
2208 | |
---|
2209 | def test_POST_mkdir_no_parentdir_noredirect2(self): |
---|
2210 | # make sure form-based arguments (as on the welcome page) still work |
---|
2211 | d = self.POST("/uri", t="mkdir") |
---|
2212 | def _after_mkdir(res): |
---|
2213 | uri.DirectoryURI.init_from_string(res) |
---|
2214 | d.addCallback(_after_mkdir) |
---|
2215 | d.addErrback(self.explain_web_error) |
---|
2216 | return d |
---|
2217 | |
---|
2218 | def test_POST_mkdir_no_parentdir_redirect(self): |
---|
2219 | d = self.POST("/uri?t=mkdir&redirect_to_result=true") |
---|
2220 | d.addBoth(self.shouldRedirect, None, statuscode='303') |
---|
2221 | def _check_target(target): |
---|
2222 | target = urllib.unquote(target) |
---|
2223 | self.failUnless(target.startswith("uri/URI:DIR2:"), target) |
---|
2224 | d.addCallback(_check_target) |
---|
2225 | return d |
---|
2226 | |
---|
2227 | def test_POST_mkdir_no_parentdir_redirect2(self): |
---|
2228 | d = self.POST("/uri", t="mkdir", redirect_to_result="true") |
---|
2229 | d.addBoth(self.shouldRedirect, None, statuscode='303') |
---|
2230 | def _check_target(target): |
---|
2231 | target = urllib.unquote(target) |
---|
2232 | self.failUnless(target.startswith("uri/URI:DIR2:"), target) |
---|
2233 | d.addCallback(_check_target) |
---|
2234 | d.addErrback(self.explain_web_error) |
---|
2235 | return d |
---|
2236 | |
---|
2237 | def _make_readonly(self, u): |
---|
2238 | ro_uri = uri.from_string(u).get_readonly() |
---|
2239 | if ro_uri is None: |
---|
2240 | return None |
---|
2241 | return ro_uri.to_string() |
---|
2242 | |
---|
2243 | def _create_initial_children(self): |
---|
2244 | contents, n, filecap1 = self.makefile(12) |
---|
2245 | md1 = {"metakey1": "metavalue1"} |
---|
2246 | filecap2 = make_mutable_file_uri() |
---|
2247 | node3 = self.s.create_node_from_uri(make_mutable_file_uri()) |
---|
2248 | filecap3 = node3.get_readonly_uri() |
---|
2249 | node4 = self.s.create_node_from_uri(make_mutable_file_uri()) |
---|
2250 | dircap = DirectoryNode(node4, None, None).get_uri() |
---|
2251 | litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm" |
---|
2252 | emptydircap = "URI:DIR2-LIT:" |
---|
2253 | newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1, |
---|
2254 | "ro_uri": self._make_readonly(filecap1), |
---|
2255 | "metadata": md1, }], |
---|
2256 | u"child-mutable": ["filenode", {"rw_uri": filecap2, |
---|
2257 | "ro_uri": self._make_readonly(filecap2)}], |
---|
2258 | u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}], |
---|
2259 | u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap, |
---|
2260 | "ro_uri": unknown_rocap}], |
---|
2261 | u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}], |
---|
2262 | u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}], |
---|
2263 | u"dirchild": ["dirnode", {"rw_uri": dircap, |
---|
2264 | "ro_uri": self._make_readonly(dircap)}], |
---|
2265 | u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}], |
---|
2266 | u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}], |
---|
2267 | } |
---|
2268 | return newkids, {'filecap1': filecap1, |
---|
2269 | 'filecap2': filecap2, |
---|
2270 | 'filecap3': filecap3, |
---|
2271 | 'unknown_rwcap': unknown_rwcap, |
---|
2272 | 'unknown_rocap': unknown_rocap, |
---|
2273 | 'unknown_immcap': unknown_immcap, |
---|
2274 | 'dircap': dircap, |
---|
2275 | 'litdircap': litdircap, |
---|
2276 | 'emptydircap': emptydircap} |
---|
2277 | |
---|
2278 | def _create_immutable_children(self): |
---|
2279 | contents, n, filecap1 = self.makefile(12) |
---|
2280 | md1 = {"metakey1": "metavalue1"} |
---|
2281 | tnode = create_chk_filenode("immutable directory contents\n"*10) |
---|
2282 | dnode = DirectoryNode(tnode, None, None) |
---|
2283 | assert not dnode.is_mutable() |
---|
2284 | immdircap = dnode.get_uri() |
---|
2285 | litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm" |
---|
2286 | emptydircap = "URI:DIR2-LIT:" |
---|
2287 | newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1, |
---|
2288 | "metadata": md1, }], |
---|
2289 | u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}], |
---|
2290 | u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}], |
---|
2291 | u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}], |
---|
2292 | u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}], |
---|
2293 | } |
---|
2294 | return newkids, {'filecap1': filecap1, |
---|
2295 | 'unknown_immcap': unknown_immcap, |
---|
2296 | 'immdircap': immdircap, |
---|
2297 | 'litdircap': litdircap, |
---|
2298 | 'emptydircap': emptydircap} |
---|
2299 | |
---|
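| # (Editor's note.) Both helpers above build the JSON body that |
---|
| # t=mkdir-with-children and t=mkdir-immutable expect: a map from child |
---|
| # name to [nodetype, {"rw_uri": ..., "ro_uri": ..., "metadata": ...}], |
---|
| # where nodetype is "filenode", "dirnode", or "unknown". For a single |
---|
| # hypothetical read-only child the serialized body would be roughly: |
---|
| #   simplejson.dumps({u"kid": ["filenode", {"ro_uri": filecap, |
---|
| #                                           "metadata": {"k": "v"}}]}) |
---|
| # with filecap standing in for a real readcap string. |
---|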
2300 | def test_POST_mkdir_no_parentdir_initial_children(self): |
---|
2301 | (newkids, caps) = self._create_initial_children() |
---|
2302 | d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids)) |
---|
2303 | def _after_mkdir(res): |
---|
2304 | self.failUnless(res.startswith("URI:DIR"), res) |
---|
2305 | n = self.s.create_node_from_uri(res) |
---|
2306 | d2 = self.failUnlessNodeKeysAre(n, newkids.keys()) |
---|
2307 | d2.addCallback(lambda ign: |
---|
2308 | self.failUnlessROChildURIIs(n, u"child-imm", |
---|
2309 | caps['filecap1'])) |
---|
2310 | d2.addCallback(lambda ign: |
---|
2311 | self.failUnlessRWChildURIIs(n, u"child-mutable", |
---|
2312 | caps['filecap2'])) |
---|
2313 | d2.addCallback(lambda ign: |
---|
2314 | self.failUnlessROChildURIIs(n, u"child-mutable-ro", |
---|
2315 | caps['filecap3'])) |
---|
2316 | d2.addCallback(lambda ign: |
---|
2317 | self.failUnlessRWChildURIIs(n, u"unknownchild-rw", |
---|
2318 | caps['unknown_rwcap'])) |
---|
2319 | d2.addCallback(lambda ign: |
---|
2320 | self.failUnlessROChildURIIs(n, u"unknownchild-ro", |
---|
2321 | caps['unknown_rocap'])) |
---|
2322 | d2.addCallback(lambda ign: |
---|
2323 | self.failUnlessROChildURIIs(n, u"unknownchild-imm", |
---|
2324 | caps['unknown_immcap'])) |
---|
2325 | d2.addCallback(lambda ign: |
---|
2326 | self.failUnlessRWChildURIIs(n, u"dirchild", |
---|
2327 | caps['dircap'])) |
---|
2328 | return d2 |
---|
2329 | d.addCallback(_after_mkdir) |
---|
2330 | return d |
---|
2331 | |
---|
2332 | def test_POST_mkdir_no_parentdir_unexpected_children(self): |
---|
2333 | # the regular /uri?t=mkdir operation is specified to ignore its body. |
---|
2334 | # Only t=mkdir-with-children pays attention to it. |
---|
2335 | (newkids, caps) = self._create_initial_children() |
---|
2336 | d = self.shouldHTTPError("POST t=mkdir unexpected children", |
---|
2337 | 400, "Bad Request", |
---|
2338 | "t=mkdir does not accept children=, " |
---|
2339 | "try t=mkdir-with-children instead", |
---|
2340 | self.POST2, "/uri?t=mkdir", # without children |
---|
2341 | simplejson.dumps(newkids)) |
---|
2342 | return d |
---|
2343 | |
---|
2344 | def test_POST_noparent_bad(self): |
---|
2345 | d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request", |
---|
2346 | "/uri accepts only PUT, PUT?t=mkdir, " |
---|
2347 | "POST?t=upload, and POST?t=mkdir", |
---|
2348 | self.POST, "/uri?t=bogus") |
---|
2349 | return d |
---|
2350 | |
---|
2351 | def test_POST_mkdir_no_parentdir_immutable(self): |
---|
2352 | (newkids, caps) = self._create_immutable_children() |
---|
2353 | d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids)) |
---|
2354 | def _after_mkdir(res): |
---|
2355 | self.failUnless(res.startswith("URI:DIR"), res) |
---|
2356 | n = self.s.create_node_from_uri(res) |
---|
2357 | d2 = self.failUnlessNodeKeysAre(n, newkids.keys()) |
---|
2358 | d2.addCallback(lambda ign: |
---|
2359 | self.failUnlessROChildURIIs(n, u"child-imm", |
---|
2360 | caps['filecap1'])) |
---|
2361 | d2.addCallback(lambda ign: |
---|
2362 | self.failUnlessROChildURIIs(n, u"unknownchild-imm", |
---|
2363 | caps['unknown_immcap'])) |
---|
2364 | d2.addCallback(lambda ign: |
---|
2365 | self.failUnlessROChildURIIs(n, u"dirchild-imm", |
---|
2366 | caps['immdircap'])) |
---|
2367 | d2.addCallback(lambda ign: |
---|
2368 | self.failUnlessROChildURIIs(n, u"dirchild-lit", |
---|
2369 | caps['litdircap'])) |
---|
2370 | d2.addCallback(lambda ign: |
---|
2371 | self.failUnlessROChildURIIs(n, u"dirchild-empty", |
---|
2372 | caps['emptydircap'])) |
---|
2373 | return d2 |
---|
2374 | d.addCallback(_after_mkdir) |
---|
2375 | return d |
---|
2376 | |
---|
2377 | def test_POST_mkdir_no_parentdir_immutable_bad(self): |
---|
2378 | (newkids, caps) = self._create_initial_children() |
---|
2379 | d = self.shouldFail2(error.Error, |
---|
2380 | "test_POST_mkdir_no_parentdir_immutable_bad", |
---|
2381 | "400 Bad Request", |
---|
2382 | "needed to be immutable but was not", |
---|
2383 | self.POST2, |
---|
2384 | "/uri?t=mkdir-immutable", |
---|
2385 | simplejson.dumps(newkids)) |
---|
2386 | return d |
---|
2387 | |
---|
2388 | def test_welcome_page_mkdir_button(self): |
---|
2389 | # Fetch the welcome page. |
---|
2390 | d = self.GET("/") |
---|
2391 | def _after_get_welcome_page(res): |
---|
2392 | MKDIR_BUTTON_RE = re.compile( |
---|
2393 | '<form action="([^"]*)" method="post".*?' |
---|
2394 | '<input type="hidden" name="t" value="([^"]*)" />' |
---|
2395 | '<input type="hidden" name="([^"]*)" value="([^"]*)" />' |
---|
2396 | '<input type="submit" value="Create a directory" />', |
---|
2397 | re.I) |
---|
2398 | mo = MKDIR_BUTTON_RE.search(res) |
---|
2399 | formaction = mo.group(1) |
---|
2400 | formt = mo.group(2) |
---|
2401 | formaname = mo.group(3) |
---|
2402 | formavalue = mo.group(4) |
---|
2403 | return (formaction, formt, formaname, formavalue) |
---|
2404 | d.addCallback(_after_get_welcome_page) |
---|
2405 | def _after_parse_form(res): |
---|
2406 | (formaction, formt, formaname, formavalue) = res |
---|
2407 | return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue)) |
---|
2408 | d.addCallback(_after_parse_form) |
---|
2409 | d.addBoth(self.shouldRedirect, None, statuscode='303') |
---|
2410 | return d |
---|
2411 | |
---|
2412 | def test_POST_mkdir_replace(self): # return value? |
---|
2413 | d = self.POST(self.public_url + "/foo", t="mkdir", name="sub") |
---|
2414 | d.addCallback(lambda res: self._foo_node.get(u"sub")) |
---|
2415 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
2416 | return d |
---|
2417 | |
---|
2418 | def test_POST_mkdir_no_replace_queryarg(self): # return value? |
---|
2419 | d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub") |
---|
2420 | d.addBoth(self.shouldFail, error.Error, |
---|
2421 | "POST_mkdir_no_replace_queryarg", |
---|
2422 | "409 Conflict", |
---|
2423 | "There was already a child by that name, and you asked me " |
---|
2424 | "to not replace it") |
---|
2425 | d.addCallback(lambda res: self._foo_node.get(u"sub")) |
---|
2426 | d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"]) |
---|
2427 | return d |
---|
2428 | |
---|
2429 | def test_POST_mkdir_no_replace_field(self): # return value? |
---|
2430 | d = self.POST(self.public_url + "/foo", t="mkdir", name="sub", |
---|
2431 | replace="false") |
---|
2432 | d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field", |
---|
2433 | "409 Conflict", |
---|
2434 | "There was already a child by that name, and you asked me " |
---|
2435 | "to not replace it") |
---|
2436 | d.addCallback(lambda res: self._foo_node.get(u"sub")) |
---|
2437 | d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"]) |
---|
2438 | return d |
---|
2439 | |
---|
2440 | def test_POST_mkdir_whendone_field(self): |
---|
2441 | d = self.POST(self.public_url + "/foo", |
---|
2442 | t="mkdir", name="newdir", when_done="/THERE") |
---|
2443 | d.addBoth(self.shouldRedirect, "/THERE") |
---|
2444 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2445 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
2446 | return d |
---|
2447 | |
---|
2448 | def test_POST_mkdir_whendone_queryarg(self): |
---|
2449 | d = self.POST(self.public_url + "/foo?when_done=/THERE", |
---|
2450 | t="mkdir", name="newdir") |
---|
2451 | d.addBoth(self.shouldRedirect, "/THERE") |
---|
2452 | d.addCallback(lambda res: self._foo_node.get(u"newdir")) |
---|
2453 | d.addCallback(self.failUnlessNodeKeysAre, []) |
---|
2454 | return d |
---|
2455 | |
---|
2456 | def test_POST_bad_t(self): |
---|
2457 | d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request", |
---|
2458 | "POST to a directory with bad t=BOGUS", |
---|
2459 | self.POST, self.public_url + "/foo", t="BOGUS") |
---|
2460 | return d |
---|
2461 | |
---|
2462 | def test_POST_set_children(self, command_name="set_children"): |
---|
2463 | contents9, n9, newuri9 = self.makefile(9) |
---|
2464 | contents10, n10, newuri10 = self.makefile(10) |
---|
2465 | contents11, n11, newuri11 = self.makefile(11) |
---|
2466 | |
---|
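|         # The t=set_children POST body is a JSON object mapping each new child
---|
|         # name to a [node-type, data] pair (the same shape a directory's
---|
|         # GET ?t=json returns), built here from the three freshly-made file caps.
---|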
2467 | reqbody = """{ |
---|
2468 | "atomic_added_1": [ "filenode", { "rw_uri": "%s", |
---|
2469 | "size": 0, |
---|
2470 | "metadata": { |
---|
2471 | "ctime": 1002777696.7564139, |
---|
2472 | "mtime": 1002777696.7564139 |
---|
2473 | } |
---|
2474 | } ], |
---|
2475 | "atomic_added_2": [ "filenode", { "rw_uri": "%s", |
---|
2476 | "size": 1, |
---|
2477 | "metadata": { |
---|
2478 | "ctime": 1002777696.7564139, |
---|
2479 | "mtime": 1002777696.7564139 |
---|
2480 | } |
---|
2481 | } ], |
---|
2482 | "atomic_added_3": [ "filenode", { "rw_uri": "%s", |
---|
2483 | "size": 2, |
---|
2484 | "metadata": { |
---|
2485 | "ctime": 1002777696.7564139, |
---|
2486 | "mtime": 1002777696.7564139 |
---|
2487 | } |
---|
2488 | } ] |
---|
2489 | }""" % (newuri9, newuri10, newuri11) |
---|
2490 | |
---|
2491 | url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name |
---|
2492 | |
---|
2493 | d = client.getPage(url, method="POST", postdata=reqbody) |
---|
2494 | def _then(res): |
---|
2495 | self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1") |
---|
2496 | self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2") |
---|
2497 | self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3") |
---|
2498 | |
---|
2499 | d.addCallback(_then) |
---|
2500 | d.addErrback(self.dump_error) |
---|
2501 | return d |
---|
2502 | |
---|
2503 | def test_POST_set_children_with_hyphen(self): |
---|
2504 | return self.test_POST_set_children(command_name="set-children") |
---|
2505 | |
---|
2506 | def test_POST_link_uri(self): |
---|
2507 | contents, n, newuri = self.makefile(8) |
---|
2508 | d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri) |
---|
2509 | d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt") |
---|
2510 | d.addCallback(lambda res: |
---|
2511 | self.failUnlessChildContentsAre(self._foo_node, u"new.txt", |
---|
2512 | contents)) |
---|
2513 | return d |
---|
2514 | |
---|
2515 | def test_POST_link_uri_replace(self): |
---|
2516 | contents, n, newuri = self.makefile(8) |
---|
2517 | d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri) |
---|
2518 | d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt") |
---|
2519 | d.addCallback(lambda res: |
---|
2520 | self.failUnlessChildContentsAre(self._foo_node, u"bar.txt", |
---|
2521 | contents)) |
---|
2522 | return d |
---|
2523 | |
---|
2524 | def test_POST_link_uri_unknown_bad(self): |
---|
2525 | d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap) |
---|
2526 | d.addBoth(self.shouldFail, error.Error, |
---|
2527 | "POST_link_uri_unknown_bad", |
---|
2528 | "400 Bad Request", |
---|
2529 | "unknown cap in a write slot") |
---|
2530 | return d |
---|
2531 | |
---|
2532 | def test_POST_link_uri_unknown_ro_good(self): |
---|
2533 | d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap) |
---|
2534 | d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt") |
---|
2535 | return d |
---|
2536 | |
---|
2537 | def test_POST_link_uri_unknown_imm_good(self): |
---|
2538 | d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap) |
---|
2539 | d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt") |
---|
2540 | return d |
---|
2541 | |
---|
2542 | def test_POST_link_uri_no_replace_queryarg(self): |
---|
2543 | contents, n, newuri = self.makefile(8) |
---|
2544 | d = self.POST(self.public_url + "/foo?replace=false", t="uri", |
---|
2545 | name="bar.txt", uri=newuri) |
---|
2546 | d.addBoth(self.shouldFail, error.Error, |
---|
2547 | "POST_link_uri_no_replace_queryarg", |
---|
2548 | "409 Conflict", |
---|
2549 | "There was already a child by that name, and you asked me " |
---|
2550 | "to not replace it") |
---|
2551 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt")) |
---|
2552 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2553 | return d |
---|
2554 | |
---|
2555 | def test_POST_link_uri_no_replace_field(self): |
---|
2556 | contents, n, newuri = self.makefile(8) |
---|
2557 | d = self.POST(self.public_url + "/foo", t="uri", replace="false", |
---|
2558 | name="bar.txt", uri=newuri) |
---|
2559 | d.addBoth(self.shouldFail, error.Error, |
---|
2560 | "POST_link_uri_no_replace_field", |
---|
2561 | "409 Conflict", |
---|
2562 | "There was already a child by that name, and you asked me " |
---|
2563 | "to not replace it") |
---|
2564 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt")) |
---|
2565 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2566 | return d |
---|
2567 | |
---|
2568 | def test_POST_delete(self): |
---|
2569 | d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt") |
---|
2570 | d.addCallback(lambda res: self._foo_node.list()) |
---|
2571 | def _check(children): |
---|
2572 | self.failIf(u"bar.txt" in children) |
---|
2573 | d.addCallback(_check) |
---|
2574 | return d |
---|
2575 | |
---|
2576 | def test_POST_rename_file(self): |
---|
2577 | d = self.POST(self.public_url + "/foo", t="rename", |
---|
2578 | from_name="bar.txt", to_name='wibble.txt') |
---|
2579 | d.addCallback(lambda res: |
---|
2580 | self.failIfNodeHasChild(self._foo_node, u"bar.txt")) |
---|
2581 | d.addCallback(lambda res: |
---|
2582 | self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt")) |
---|
2583 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt")) |
---|
2584 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2585 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json")) |
---|
2586 | d.addCallback(self.failUnlessIsBarJSON) |
---|
2587 | return d |
---|
2588 | |
---|
2589 | def test_POST_rename_file_redundant(self): |
---|
2590 | d = self.POST(self.public_url + "/foo", t="rename", |
---|
2591 | from_name="bar.txt", to_name='bar.txt') |
---|
2592 | d.addCallback(lambda res: |
---|
2593 | self.failUnlessNodeHasChild(self._foo_node, u"bar.txt")) |
---|
2594 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt")) |
---|
2595 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2596 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json")) |
---|
2597 | d.addCallback(self.failUnlessIsBarJSON) |
---|
2598 | return d |
---|
2599 | |
---|
2600 | def test_POST_rename_file_replace(self): |
---|
2601 | # rename a file and replace a directory with it |
---|
2602 | d = self.POST(self.public_url + "/foo", t="rename", |
---|
2603 | from_name="bar.txt", to_name='empty') |
---|
2604 | d.addCallback(lambda res: |
---|
2605 | self.failIfNodeHasChild(self._foo_node, u"bar.txt")) |
---|
2606 | d.addCallback(lambda res: |
---|
2607 | self.failUnlessNodeHasChild(self._foo_node, u"empty")) |
---|
2608 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty")) |
---|
2609 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2610 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json")) |
---|
2611 | d.addCallback(self.failUnlessIsBarJSON) |
---|
2612 | return d |
---|
2613 | |
---|
2614 | def test_POST_rename_file_no_replace_queryarg(self): |
---|
2615 | # rename a file and replace a directory with it |
---|
2616 | d = self.POST(self.public_url + "/foo?replace=false", t="rename", |
---|
2617 | from_name="bar.txt", to_name='empty') |
---|
2618 | d.addBoth(self.shouldFail, error.Error, |
---|
2619 | "POST_rename_file_no_replace_queryarg", |
---|
2620 | "409 Conflict", |
---|
2621 | "There was already a child by that name, and you asked me " |
---|
2622 | "to not replace it") |
---|
2623 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json")) |
---|
2624 | d.addCallback(self.failUnlessIsEmptyJSON) |
---|
2625 | return d |
---|
2626 | |
---|
2627 | def test_POST_rename_file_no_replace_field(self): |
---|
2628 | # rename a file and replace a directory with it |
---|
2629 | d = self.POST(self.public_url + "/foo", t="rename", replace="false", |
---|
2630 | from_name="bar.txt", to_name='empty') |
---|
2631 | d.addBoth(self.shouldFail, error.Error, |
---|
2632 | "POST_rename_file_no_replace_field", |
---|
2633 | "409 Conflict", |
---|
2634 | "There was already a child by that name, and you asked me " |
---|
2635 | "to not replace it") |
---|
2636 | d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json")) |
---|
2637 | d.addCallback(self.failUnlessIsEmptyJSON) |
---|
2638 | return d |
---|
2639 | |
---|
2640 | def failUnlessIsEmptyJSON(self, res): |
---|
2641 | data = simplejson.loads(res) |
---|
2642 | self.failUnlessEqual(data[0], "dirnode", data) |
---|
2643 | self.failUnlessReallyEqual(len(data[1]["children"]), 0) |
---|
2644 | |
---|
2645 | def test_POST_rename_file_slash_fail(self): |
---|
2646 | d = self.POST(self.public_url + "/foo", t="rename", |
---|
2647 | from_name="bar.txt", to_name='kirk/spock.txt') |
---|
2648 | d.addBoth(self.shouldFail, error.Error, |
---|
2649 | "test_POST_rename_file_slash_fail", |
---|
2650 | "400 Bad Request", |
---|
2651 | "to_name= may not contain a slash", |
---|
2652 | ) |
---|
2653 | d.addCallback(lambda res: |
---|
2654 | self.failUnlessNodeHasChild(self._foo_node, u"bar.txt")) |
---|
2655 | return d |
---|
2656 | |
---|
2657 | def test_POST_rename_dir(self): |
---|
2658 | d = self.POST(self.public_url, t="rename", |
---|
2659 | from_name="foo", to_name='plunk') |
---|
2660 | d.addCallback(lambda res: |
---|
2661 | self.failIfNodeHasChild(self.public_root, u"foo")) |
---|
2662 | d.addCallback(lambda res: |
---|
2663 | self.failUnlessNodeHasChild(self.public_root, u"plunk")) |
---|
2664 | d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json")) |
---|
2665 | d.addCallback(self.failUnlessIsFooJSON) |
---|
2666 | return d |
---|
2667 | |
---|
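|     # Helper used via addBoth() by the redirect tests: when a redirect is not
---|
|     # followed, the GET/POST helpers fail with error.PageRedirect, whose .status
---|
|     # and .location fields carry the response code and Location target; this
---|
|     # method traps that failure and checks those fields.
---|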
2668 | def shouldRedirect(self, res, target=None, statuscode=None, which=""): |
---|
2669 | """ If target is not None then the redirection has to go to target. If |
---|
2670 | statuscode is not None then the redirection has to be accomplished with |
---|
2671 | that HTTP status code.""" |
---|
2672 | if not isinstance(res, failure.Failure): |
---|
2673 | to_where = (target is None) and "somewhere" or ("to " + target) |
---|
2674 | self.fail("%s: we were expecting to get redirected %s, not get an" |
---|
2675 | " actual page: %s" % (which, to_where, res)) |
---|
2676 | res.trap(error.PageRedirect) |
---|
2677 | if statuscode is not None: |
---|
2678 | self.failUnlessReallyEqual(res.value.status, statuscode, |
---|
2679 | "%s: not a redirect" % which) |
---|
2680 | if target is not None: |
---|
2681 | # the PageRedirect does not seem to capture the uri= query arg |
---|
2682 | # properly, so we can't check for it. |
---|
2683 | realtarget = self.webish_url + target |
---|
2684 | self.failUnlessReallyEqual(res.value.location, realtarget, |
---|
2685 | "%s: wrong target" % which) |
---|
2686 | return res.value.location |
---|
2687 | |
---|
2688 | def test_GET_URI_form(self): |
---|
2689 | base = "/uri?uri=%s" % self._bar_txt_uri |
---|
2690 | # this is supposed to give us a redirect to /uri/$URI, plus arguments |
---|
2691 | targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri) |
---|
2692 | d = self.GET(base) |
---|
2693 | d.addBoth(self.shouldRedirect, targetbase) |
---|
2694 | d.addCallback(lambda res: self.GET(base+"&filename=bar.txt")) |
---|
2695 | d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt") |
---|
2696 | d.addCallback(lambda res: self.GET(base+"&t=json")) |
---|
2697 | d.addBoth(self.shouldRedirect, targetbase+"?t=json") |
---|
2698 | d.addCallback(self.log, "about to get file by uri") |
---|
2699 | d.addCallback(lambda res: self.GET(base, followRedirect=True)) |
---|
2700 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2701 | d.addCallback(self.log, "got file by uri, about to get dir by uri") |
---|
2702 | d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri, |
---|
2703 | followRedirect=True)) |
---|
2704 | d.addCallback(self.failUnlessIsFooJSON) |
---|
2705 | d.addCallback(self.log, "got dir by uri") |
---|
2706 | |
---|
2707 | return d |
---|
2708 | |
---|
2709 | def test_GET_URI_form_bad(self): |
---|
2710 | d = self.shouldFail2(error.Error, "test_GET_URI_form_bad", |
---|
2711 | "400 Bad Request", "GET /uri requires uri=", |
---|
2712 | self.GET, "/uri") |
---|
2713 | return d |
---|
2714 | |
---|
2715 | def test_GET_rename_form(self): |
---|
2716 | d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt", |
---|
2717 | followRedirect=True) |
---|
2718 | def _check(res): |
---|
2719 | self.failUnless('name="when_done" value="."' in res, res) |
---|
2720 | self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res)) |
---|
2721 | d.addCallback(_check) |
---|
2722 | return d |
---|
2723 | |
---|
2724 | def log(self, res, msg): |
---|
2725 | #print "MSG: %s RES: %s" % (msg, res) |
---|
2726 | log.msg(msg) |
---|
2727 | return res |
---|
2728 | |
---|
2729 | def test_GET_URI_URL(self): |
---|
2730 | base = "/uri/%s" % self._bar_txt_uri |
---|
2731 | d = self.GET(base) |
---|
2732 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2733 | d.addCallback(lambda res: self.GET(base+"?filename=bar.txt")) |
---|
2734 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2735 | d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true")) |
---|
2736 | d.addCallback(self.failUnlessIsBarDotTxt) |
---|
2737 | return d |
---|
2738 | |
---|
2739 | def test_GET_URI_URL_dir(self): |
---|
2740 | base = "/uri/%s?t=json" % self._foo_uri |
---|
2741 | d = self.GET(base) |
---|
2742 | d.addCallback(self.failUnlessIsFooJSON) |
---|
2743 | return d |
---|
2744 | |
---|
2745 | def test_GET_URI_URL_missing(self): |
---|
2746 | base = "/uri/%s" % self._bad_file_uri |
---|
2747 | d = self.shouldHTTPError("test_GET_URI_URL_missing", |
---|
2748 | http.GONE, None, "NotEnoughSharesError", |
---|
2749 | self.GET, base) |
---|
2750 | # TODO: how can we exercise both sides of WebDownloadTarget.fail |
---|
2751 | # here? we must arrange for a download to fail after target.open() |
---|
2752 | # has been called, and then inspect the response to see that it is |
---|
2753 | # shorter than we expected. |
---|
2754 | return d |
---|
2755 | |
---|
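|     # PUT <path>?t=uri attaches an existing cap (sent as the request body) at
---|
|     # the target path, replacing any existing child unless replace=false is
---|
|     # given; the next few tests cover both cases for directories and files.
---|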
2756 | def test_PUT_DIRURL_uri(self): |
---|
2757 | d = self.s.create_dirnode() |
---|
2758 | def _made_dir(dn): |
---|
2759 | new_uri = dn.get_uri() |
---|
2760 | # replace /foo with a new (empty) directory |
---|
2761 | d = self.PUT(self.public_url + "/foo?t=uri", new_uri) |
---|
2762 | d.addCallback(lambda res: |
---|
2763 | self.failUnlessReallyEqual(res.strip(), new_uri)) |
---|
2764 | d.addCallback(lambda res: |
---|
2765 | self.failUnlessRWChildURIIs(self.public_root, |
---|
2766 | u"foo", |
---|
2767 | new_uri)) |
---|
2768 | return d |
---|
2769 | d.addCallback(_made_dir) |
---|
2770 | return d |
---|
2771 | |
---|
2772 | def test_PUT_DIRURL_uri_noreplace(self): |
---|
2773 | d = self.s.create_dirnode() |
---|
2774 | def _made_dir(dn): |
---|
2775 | new_uri = dn.get_uri() |
---|
2776 | # replace /foo with a new (empty) directory, but ask that |
---|
2777 | # replace=false, so it should fail |
---|
2778 | d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace", |
---|
2779 | "409 Conflict", "There was already a child by that name, and you asked me to not replace it", |
---|
2780 | self.PUT, |
---|
2781 | self.public_url + "/foo?t=uri&replace=false", |
---|
2782 | new_uri) |
---|
2783 | d.addCallback(lambda res: |
---|
2784 | self.failUnlessRWChildURIIs(self.public_root, |
---|
2785 | u"foo", |
---|
2786 | self._foo_uri)) |
---|
2787 | return d |
---|
2788 | d.addCallback(_made_dir) |
---|
2789 | return d |
---|
2790 | |
---|
2791 | def test_PUT_DIRURL_bad_t(self): |
---|
2792 | d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t", |
---|
2793 | "400 Bad Request", "PUT to a directory", |
---|
2794 | self.PUT, self.public_url + "/foo?t=BOGUS", "") |
---|
2795 | d.addCallback(lambda res: |
---|
2796 | self.failUnlessRWChildURIIs(self.public_root, |
---|
2797 | u"foo", |
---|
2798 | self._foo_uri)) |
---|
2799 | return d |
---|
2800 | |
---|
2801 | def test_PUT_NEWFILEURL_uri(self): |
---|
2802 | contents, n, new_uri = self.makefile(8) |
---|
2803 | d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri) |
---|
2804 | d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri)) |
---|
2805 | d.addCallback(lambda res: |
---|
2806 | self.failUnlessChildContentsAre(self._foo_node, u"new.txt", |
---|
2807 | contents)) |
---|
2808 | return d |
---|
2809 | |
---|
2810 | def test_PUT_NEWFILEURL_uri_replace(self): |
---|
2811 | contents, n, new_uri = self.makefile(8) |
---|
2812 | d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri) |
---|
2813 | d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri)) |
---|
2814 | d.addCallback(lambda res: |
---|
2815 | self.failUnlessChildContentsAre(self._foo_node, u"bar.txt", |
---|
2816 | contents)) |
---|
2817 | return d |
---|
2818 | |
---|
2819 | def test_PUT_NEWFILEURL_uri_no_replace(self): |
---|
2820 | contents, n, new_uri = self.makefile(8) |
---|
2821 | d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri) |
---|
2822 | d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace", |
---|
2823 | "409 Conflict", |
---|
2824 | "There was already a child by that name, and you asked me " |
---|
2825 | "to not replace it") |
---|
2826 | return d |
---|
2827 | |
---|
2828 | def test_PUT_NEWFILEURL_uri_unknown_bad(self): |
---|
2829 | d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap) |
---|
2830 | d.addBoth(self.shouldFail, error.Error, |
---|
2831 | "POST_put_uri_unknown_bad", |
---|
2832 | "400 Bad Request", |
---|
2833 | "unknown cap in a write slot") |
---|
2834 | return d |
---|
2835 | |
---|
2836 | def test_PUT_NEWFILEURL_uri_unknown_ro_good(self): |
---|
2837 | d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap) |
---|
2838 | d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, |
---|
2839 | u"put-future-ro.txt") |
---|
2840 | return d |
---|
2841 | |
---|
2842 | def test_PUT_NEWFILEURL_uri_unknown_imm_good(self): |
---|
2843 | d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap) |
---|
2844 | d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, |
---|
2845 | u"put-future-imm.txt") |
---|
2846 | return d |
---|
2847 | |
---|
2848 | def test_PUT_NEWFILE_URI(self): |
---|
2849 | file_contents = "New file contents here\n" |
---|
2850 | d = self.PUT("/uri", file_contents) |
---|
2851 | def _check(uri): |
---|
2852 | assert isinstance(uri, str), uri |
---|
2853 | self.failUnless(uri in FakeCHKFileNode.all_contents) |
---|
2854 | self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri], |
---|
2855 | file_contents) |
---|
2856 | return self.GET("/uri/%s" % uri) |
---|
2857 | d.addCallback(_check) |
---|
2858 | def _check2(res): |
---|
2859 | self.failUnlessReallyEqual(res, file_contents) |
---|
2860 | d.addCallback(_check2) |
---|
2861 | return d |
---|
2862 | |
---|
2863 | def test_PUT_NEWFILE_URI_not_mutable(self): |
---|
2864 | file_contents = "New file contents here\n" |
---|
2865 | d = self.PUT("/uri?mutable=false", file_contents) |
---|
2866 | def _check(uri): |
---|
2867 | assert isinstance(uri, str), uri |
---|
2868 | self.failUnless(uri in FakeCHKFileNode.all_contents) |
---|
2869 | self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri], |
---|
2870 | file_contents) |
---|
2871 | return self.GET("/uri/%s" % uri) |
---|
2872 | d.addCallback(_check) |
---|
2873 | def _check2(res): |
---|
2874 | self.failUnlessReallyEqual(res, file_contents) |
---|
2875 | d.addCallback(_check2) |
---|
2876 | return d |
---|
2877 | |
---|
2878 | def test_PUT_NEWFILE_URI_only_PUT(self): |
---|
2879 | d = self.PUT("/uri?t=bogus", "") |
---|
2880 | d.addBoth(self.shouldFail, error.Error, |
---|
2881 | "PUT_NEWFILE_URI_only_PUT", |
---|
2882 | "400 Bad Request", |
---|
2883 | "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir") |
---|
2884 | return d |
---|
2885 | |
---|
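|     # PUT /uri?mutable=true uploads the body as a new mutable (SSK) file and
---|
|     # returns its write-cap (a "URI:SSK:" string), which should be readable
---|
|     # both through the node object and via GET /uri/<cap>.
---|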
2886 | def test_PUT_NEWFILE_URI_mutable(self): |
---|
2887 | file_contents = "New file contents here\n" |
---|
2888 | d = self.PUT("/uri?mutable=true", file_contents) |
---|
2889 | def _check1(filecap): |
---|
2890 | filecap = filecap.strip() |
---|
2891 | self.failUnless(filecap.startswith("URI:SSK:"), filecap) |
---|
2892 | self.filecap = filecap |
---|
2893 | u = uri.WriteableSSKFileURI.init_from_string(filecap) |
---|
2894 | self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents) |
---|
2895 | n = self.s.create_node_from_uri(filecap) |
---|
2896 | return n.download_best_version() |
---|
2897 | d.addCallback(_check1) |
---|
2898 | def _check2(data): |
---|
2899 | self.failUnlessReallyEqual(data, file_contents) |
---|
2900 | return self.GET("/uri/%s" % urllib.quote(self.filecap)) |
---|
2901 | d.addCallback(_check2) |
---|
2902 | def _check3(res): |
---|
2903 | self.failUnlessReallyEqual(res, file_contents) |
---|
2904 | d.addCallback(_check3) |
---|
2905 | return d |
---|
2906 | |
---|
2907 | def test_PUT_mkdir(self): |
---|
2908 | d = self.PUT("/uri?t=mkdir", "") |
---|
2909 | def _check(uri): |
---|
2910 | n = self.s.create_node_from_uri(uri.strip()) |
---|
2911 | d2 = self.failUnlessNodeKeysAre(n, []) |
---|
2912 | d2.addCallback(lambda res: |
---|
2913 | self.GET("/uri/%s?t=json" % uri)) |
---|
2914 | return d2 |
---|
2915 | d.addCallback(_check) |
---|
2916 | d.addCallback(self.failUnlessIsEmptyJSON) |
---|
2917 | return d |
---|
2918 | |
---|
2919 | def test_POST_check(self): |
---|
2920 | d = self.POST(self.public_url + "/foo", t="check", name="bar.txt") |
---|
2921 | def _done(res): |
---|
2922 | # this returns a string form of the results, which are probably |
---|
2923 | # None since we're using fake filenodes. |
---|
2924 | # TODO: verify that the check actually happened, by changing |
---|
2925 | # FakeCHKFileNode to count how many times .check() has been |
---|
2926 | # called. |
---|
2927 | pass |
---|
2928 | d.addCallback(_done) |
---|
2929 | return d |
---|
2930 | |
---|
2931 | def test_bad_method(self): |
---|
2932 | url = self.webish_url + self.public_url + "/foo/bar.txt" |
---|
2933 | d = self.shouldHTTPError("test_bad_method", |
---|
2934 | 501, "Not Implemented", |
---|
2935 | "I don't know how to treat a BOGUS request.", |
---|
2936 | client.getPage, url, method="BOGUS") |
---|
2937 | return d |
---|
2938 | |
---|
2939 | def test_short_url(self): |
---|
2940 | url = self.webish_url + "/uri" |
---|
2941 | d = self.shouldHTTPError("test_short_url", 501, "Not Implemented", |
---|
2942 | "I don't know how to treat a DELETE request.", |
---|
2943 | client.getPage, url, method="DELETE") |
---|
2944 | return d |
---|
2945 | |
---|
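|     # The next several tests exercise the /operations/<ophandle> machinery:
---|
|     # long-running operations started with e.g. t=start-manifest&ophandle=N are
---|
|     # polled at /operations/N?t=status (optionally output=JSON), can be
---|
|     # cancelled with t=cancel, and are eventually forgotten, after which the
---|
|     # status URL returns 404 "unknown/expired handle".
---|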
2946 | def test_ophandle_bad(self): |
---|
2947 | url = self.webish_url + "/operations/bogus?t=status" |
---|
2948 | d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found", |
---|
2949 | "unknown/expired handle 'bogus'", |
---|
2950 | client.getPage, url) |
---|
2951 | return d |
---|
2952 | |
---|
2953 | def test_ophandle_cancel(self): |
---|
2954 | d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128", |
---|
2955 | followRedirect=True) |
---|
2956 | d.addCallback(lambda ignored: |
---|
2957 | self.GET("/operations/128?t=status&output=JSON")) |
---|
2958 | def _check1(res): |
---|
2959 | data = simplejson.loads(res) |
---|
2960 | self.failUnless("finished" in data, res) |
---|
2961 | monitor = self.ws.root.child_operations.handles["128"][0] |
---|
2962 | d = self.POST("/operations/128?t=cancel&output=JSON") |
---|
2963 | def _check2(res): |
---|
2964 | data = simplejson.loads(res) |
---|
2965 | self.failUnless("finished" in data, res) |
---|
2966 | # t=cancel causes the handle to be forgotten |
---|
2967 | self.failUnless(monitor.is_cancelled()) |
---|
2968 | d.addCallback(_check2) |
---|
2969 | return d |
---|
2970 | d.addCallback(_check1) |
---|
2971 | d.addCallback(lambda ignored: |
---|
2972 | self.shouldHTTPError("test_ophandle_cancel", |
---|
2973 | 404, "404 Not Found", |
---|
2974 | "unknown/expired handle '128'", |
---|
2975 | self.GET, |
---|
2976 | "/operations/128?t=status&output=JSON")) |
---|
2977 | return d |
---|
2978 | |
---|
2979 | def test_ophandle_retainfor(self): |
---|
2980 | d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60", |
---|
2981 | followRedirect=True) |
---|
2982 | d.addCallback(lambda ignored: |
---|
2983 | self.GET("/operations/129?t=status&output=JSON&retain-for=0")) |
---|
2984 | def _check1(res): |
---|
2985 | data = simplejson.loads(res) |
---|
2986 | self.failUnless("finished" in data, res) |
---|
2987 | d.addCallback(_check1) |
---|
2988 | # the retain-for=0 will cause the handle to be expired very soon |
---|
2989 | d.addCallback(lambda ign: |
---|
2990 | self.clock.advance(2.0)) |
---|
2991 | d.addCallback(lambda ignored: |
---|
2992 | self.shouldHTTPError("test_ophandle_retainfor", |
---|
2993 | 404, "404 Not Found", |
---|
2994 | "unknown/expired handle '129'", |
---|
2995 | self.GET, |
---|
2996 | "/operations/129?t=status&output=JSON")) |
---|
2997 | return d |
---|
2998 | |
---|
2999 | def test_ophandle_release_after_complete(self): |
---|
3000 | d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130", |
---|
3001 | followRedirect=True) |
---|
3002 | d.addCallback(self.wait_for_operation, "130") |
---|
3003 | d.addCallback(lambda ignored: |
---|
3004 | self.GET("/operations/130?t=status&output=JSON&release-after-complete=true")) |
---|
3005 | # the release-after-complete=true will cause the handle to be expired |
---|
3006 | d.addCallback(lambda ignored: |
---|
3007 | self.shouldHTTPError("test_ophandle_release_after_complete", |
---|
3008 | 404, "404 Not Found", |
---|
3009 | "unknown/expired handle '130'", |
---|
3010 | self.GET, |
---|
3011 | "/operations/130?t=status&output=JSON")) |
---|
3012 | return d |
---|
3013 | |
---|
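|     # The expiration tests below advance self.clock, assumed to be a
---|
|     # twisted.internet.task.Clock installed by the test setup in place of real
---|
|     # reactor timers, so handle timeouts can be simulated without waiting.
---|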
3014 | def test_uncollected_ophandle_expiration(self): |
---|
3015 | # uncollected ophandles should expire after 4 days |
---|
3016 | def _make_uncollected_ophandle(ophandle): |
---|
3017 | d = self.POST(self.public_url + |
---|
3018 | "/foo/?t=start-manifest&ophandle=%d" % ophandle, |
---|
3019 | followRedirect=False) |
---|
3020 | # When we start the operation, the webapi server will want |
---|
3021 | # to redirect us to the page for the ophandle, so we get |
---|
3022 | # confirmation that the operation has started. If the |
---|
3023 | # manifest operation has finished by the time we get there, |
---|
3024 |             # following that redirect (which would happen if we passed
---|
3025 |             # followRedirect=True above) has the side effect of collecting the ophandle that
---|
3026 | # we've just created, which means that we can't use the |
---|
3027 | # ophandle to test the uncollected timeout anymore. So, |
---|
3028 | # instead, catch the 302 here and don't follow it. |
---|
3029 | d.addBoth(self.should302, "uncollected_ophandle_creation") |
---|
3030 | return d |
---|
3031 | # Create an ophandle, don't collect it, then advance the clock by |
---|
3032 | # 4 days - 1 second and make sure that the ophandle is still there. |
---|
3033 | d = _make_uncollected_ophandle(131) |
---|
3034 | d.addCallback(lambda ign: |
---|
3035 | self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days |
---|
3036 | d.addCallback(lambda ign: |
---|
3037 | self.GET("/operations/131?t=status&output=JSON")) |
---|
3038 | def _check1(res): |
---|
3039 | data = simplejson.loads(res) |
---|
3040 | self.failUnless("finished" in data, res) |
---|
3041 | d.addCallback(_check1) |
---|
3042 | # Create an ophandle, don't collect it, then try to collect it |
---|
3043 | # after 4 days. It should be gone. |
---|
3044 | d.addCallback(lambda ign: |
---|
3045 | _make_uncollected_ophandle(132)) |
---|
3046 | d.addCallback(lambda ign: |
---|
3047 | self.clock.advance(96*60*60)) |
---|
3048 | d.addCallback(lambda ign: |
---|
3049 |                       self.shouldHTTPError("test_uncollected_ophandle_expired_after_96_hours",
---|
3050 | 404, "404 Not Found", |
---|
3051 | "unknown/expired handle '132'", |
---|
3052 | self.GET, |
---|
3053 | "/operations/132?t=status&output=JSON")) |
---|
3054 | return d |
---|
3055 | |
---|
3056 | def test_collected_ophandle_expiration(self): |
---|
3057 | # collected ophandles should expire after 1 day |
---|
3058 | def _make_collected_ophandle(ophandle): |
---|
3059 | d = self.POST(self.public_url + |
---|
3060 | "/foo/?t=start-manifest&ophandle=%d" % ophandle, |
---|
3061 | followRedirect=True) |
---|
3062 | # By following the initial redirect, we collect the ophandle |
---|
3063 | # we've just created. |
---|
3064 | return d |
---|
3065 |         # Create a collected ophandle, then poll it again 23 hours, 59 minutes
---|
3066 |         # and 59 seconds later to make sure that it is still there.
---|
3067 | d = _make_collected_ophandle(133) |
---|
3068 | d.addCallback(lambda ign: |
---|
3069 | self.clock.advance((24*60*60) - 1)) |
---|
3070 | d.addCallback(lambda ign: |
---|
3071 | self.GET("/operations/133?t=status&output=JSON")) |
---|
3072 | def _check1(res): |
---|
3073 | data = simplejson.loads(res) |
---|
3074 | self.failUnless("finished" in data, res) |
---|
3075 | d.addCallback(_check1) |
---|
3076 |         # Create another collected ophandle, then try to collect it
---|
3077 |         # after 24 hours to make sure that it is gone.
---|
3078 | d.addCallback(lambda ign: |
---|
3079 | _make_collected_ophandle(134)) |
---|
3080 | d.addCallback(lambda ign: |
---|
3081 | self.clock.advance(24*60*60)) |
---|
3082 | d.addCallback(lambda ign: |
---|
3083 |                       self.shouldHTTPError("test_collected_ophandle_expired_after_24_hours",
---|
3084 | 404, "404 Not Found", |
---|
3085 | "unknown/expired handle '134'", |
---|
3086 | self.GET, |
---|
3087 | "/operations/134?t=status&output=JSON")) |
---|
3088 | return d |
---|
3089 | |
---|
3090 | def test_incident(self): |
---|
3091 | d = self.POST("/report_incident", details="eek") |
---|
3092 | def _done(res): |
---|
3093 | self.failUnless("Thank you for your report!" in res, res) |
---|
3094 | d.addCallback(_done) |
---|
3095 | return d |
---|
3096 | |
---|
3097 | def test_static(self): |
---|
3098 | webdir = os.path.join(self.staticdir, "subdir") |
---|
3099 | fileutil.make_dirs(webdir) |
---|
3100 | f = open(os.path.join(webdir, "hello.txt"), "wb") |
---|
3101 | f.write("hello") |
---|
3102 | f.close() |
---|
3103 | |
---|
3104 | d = self.GET("/static/subdir/hello.txt") |
---|
3105 | def _check(res): |
---|
3106 | self.failUnlessReallyEqual(res, "hello") |
---|
3107 | d.addCallback(_check) |
---|
3108 | return d |
---|
3109 | |
---|
3110 | |
---|
3111 | class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase): |
---|
3112 | def test_load_file(self): |
---|
3113 | # This will raise an exception unless a well-formed XML file is found under that name. |
---|
3114 | common.getxmlfile('directory.xhtml').load() |
---|
3115 | |
---|
3116 | def test_parse_replace_arg(self): |
---|
3117 | self.failUnlessReallyEqual(common.parse_replace_arg("true"), True) |
---|
3118 | self.failUnlessReallyEqual(common.parse_replace_arg("false"), False) |
---|
3119 | self.failUnlessReallyEqual(common.parse_replace_arg("only-files"), |
---|
3120 | "only-files") |
---|
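|         # "only_fles" is a deliberately invalid value (not one of
---|
|         # true/false/only-files), so parse_replace_arg is expected to reject it.
---|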
3121 | self.shouldFail(AssertionError, "test_parse_replace_arg", "", |
---|
3122 | common.parse_replace_arg, "only_fles") |
---|
3123 | |
---|
3124 | def test_abbreviate_time(self): |
---|
3125 | self.failUnlessReallyEqual(common.abbreviate_time(None), "") |
---|
3126 | self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s") |
---|
3127 | self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms") |
---|
3128 | self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms") |
---|
3129 | self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us") |
---|
3130 | self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us") |
---|
3131 | |
---|
3132 | def test_abbreviate_rate(self): |
---|
3133 | self.failUnlessReallyEqual(common.abbreviate_rate(None), "") |
---|
3134 | self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps") |
---|
3135 | self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps") |
---|
3136 | self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps") |
---|
3137 | |
---|
3138 | def test_abbreviate_size(self): |
---|
3139 | self.failUnlessReallyEqual(common.abbreviate_size(None), "") |
---|
3140 | self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB") |
---|
3141 | self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB") |
---|
3142 | self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB") |
---|
3143 | self.failUnlessReallyEqual(common.abbreviate_size(123), "123B") |
---|
3144 | |
---|
3145 | def test_plural(self): |
---|
3146 | def convert(s): |
---|
3147 | return "%d second%s" % (s, status.plural(s)) |
---|
3148 | self.failUnlessReallyEqual(convert(0), "0 seconds") |
---|
3149 | self.failUnlessReallyEqual(convert(1), "1 second") |
---|
3150 | self.failUnlessReallyEqual(convert(2), "2 seconds") |
---|
3151 | def convert2(s): |
---|
3152 | return "has share%s: %s" % (status.plural(s), ",".join(s)) |
---|
3153 | self.failUnlessReallyEqual(convert2([]), "has shares: ") |
---|
3154 | self.failUnlessReallyEqual(convert2(["1"]), "has share: 1") |
---|
3155 | self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2") |
---|
3156 | |
---|
3157 | |
---|
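| # Unlike the tests above, which run against fake filenodes, the Grid tests use
---|
| # GridTestMixin to stand up a real in-process (no-network) grid and exercise
---|
| # the webapi end to end; CHECK() below POSTs to a previously-stashed file URL
---|
| # with the given t=check / t=check&repair=... query arguments.
---|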
3158 | class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase): |
---|
3159 | |
---|
3160 | def CHECK(self, ign, which, args, clientnum=0): |
---|
3161 | fileurl = self.fileurls[which] |
---|
3162 | url = fileurl + "?" + args |
---|
3163 | return self.GET(url, method="POST", clientnum=clientnum) |
---|
3164 | |
---|
3165 | def test_filecheck(self): |
---|
3166 | self.basedir = "web/Grid/filecheck" |
---|
3167 | self.set_up_grid() |
---|
3168 | c0 = self.g.clients[0] |
---|
3169 | self.uris = {} |
---|
3170 | DATA = "data" * 100 |
---|
3171 | d = c0.upload(upload.Data(DATA, convergence="")) |
---|
3172 | def _stash_uri(ur, which): |
---|
3173 | self.uris[which] = ur.uri |
---|
3174 | d.addCallback(_stash_uri, "good") |
---|
3175 | d.addCallback(lambda ign: |
---|
3176 | c0.upload(upload.Data(DATA+"1", convergence=""))) |
---|
3177 | d.addCallback(_stash_uri, "sick") |
---|
3178 | d.addCallback(lambda ign: |
---|
3179 | c0.upload(upload.Data(DATA+"2", convergence=""))) |
---|
3180 | d.addCallback(_stash_uri, "dead") |
---|
3181 | def _stash_mutable_uri(n, which): |
---|
3182 | self.uris[which] = n.get_uri() |
---|
3183 | assert isinstance(self.uris[which], str) |
---|
3184 | d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3")) |
---|
3185 | d.addCallback(_stash_mutable_uri, "corrupt") |
---|
3186 | d.addCallback(lambda ign: |
---|
3187 | c0.upload(upload.Data("literal", convergence=""))) |
---|
3188 | d.addCallback(_stash_uri, "small") |
---|
3189 | d.addCallback(lambda ign: c0.create_immutable_dirnode({})) |
---|
3190 | d.addCallback(_stash_mutable_uri, "smalldir") |
---|
3191 | |
---|
3192 | def _compute_fileurls(ignored): |
---|
3193 | self.fileurls = {} |
---|
3194 | for which in self.uris: |
---|
3195 | self.fileurls[which] = "uri/" + urllib.quote(self.uris[which]) |
---|
3196 | d.addCallback(_compute_fileurls) |
---|
3197 | |
---|
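|         # Damage the uploads in three ways: "sick" loses one of its ten shares
---|
|         # (still recoverable with 3-of-10 encoding), "dead" loses nine
---|
|         # (unrecoverable), and one share of "corrupt" is mangled in place with
---|
|         # the corrupt_share debug tool. find_uri_shares() apparently yields
---|
|         # (sharenum, serverid, sharefile) tuples, hence the [2] index for the path.
---|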
3198 | def _clobber_shares(ignored): |
---|
3199 | good_shares = self.find_uri_shares(self.uris["good"]) |
---|
3200 | self.failUnlessReallyEqual(len(good_shares), 10) |
---|
3201 | sick_shares = self.find_uri_shares(self.uris["sick"]) |
---|
3202 | os.unlink(sick_shares[0][2]) |
---|
3203 | dead_shares = self.find_uri_shares(self.uris["dead"]) |
---|
3204 | for i in range(1, 10): |
---|
3205 | os.unlink(dead_shares[i][2]) |
---|
3206 | c_shares = self.find_uri_shares(self.uris["corrupt"]) |
---|
3207 | cso = CorruptShareOptions() |
---|
3208 | cso.stdout = StringIO() |
---|
3209 | cso.parseOptions([c_shares[0][2]]) |
---|
3210 | corrupt_share(cso) |
---|
3211 | d.addCallback(_clobber_shares) |
---|
3212 | |
---|
3213 | d.addCallback(self.CHECK, "good", "t=check") |
---|
3214 | def _got_html_good(res): |
---|
3215 | self.failUnless("Healthy" in res, res) |
---|
3216 | self.failIf("Not Healthy" in res, res) |
---|
3217 | d.addCallback(_got_html_good) |
---|
3218 | d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere") |
---|
3219 | def _got_html_good_return_to(res): |
---|
3220 | self.failUnless("Healthy" in res, res) |
---|
3221 | self.failIf("Not Healthy" in res, res) |
---|
3222 | self.failUnless('<a href="somewhere">Return to file' |
---|
3223 | in res, res) |
---|
3224 | d.addCallback(_got_html_good_return_to) |
---|
3225 | d.addCallback(self.CHECK, "good", "t=check&output=json") |
---|
3226 | def _got_json_good(res): |
---|
3227 | r = simplejson.loads(res) |
---|
3228 | self.failUnlessEqual(r["summary"], "Healthy") |
---|
3229 | self.failUnless(r["results"]["healthy"]) |
---|
3230 | self.failIf(r["results"]["needs-rebalancing"]) |
---|
3231 | self.failUnless(r["results"]["recoverable"]) |
---|
3232 | d.addCallback(_got_json_good) |
---|
3233 | |
---|
3234 | d.addCallback(self.CHECK, "small", "t=check") |
---|
3235 | def _got_html_small(res): |
---|
3236 | self.failUnless("Literal files are always healthy" in res, res) |
---|
3237 | self.failIf("Not Healthy" in res, res) |
---|
3238 | d.addCallback(_got_html_small) |
---|
3239 | d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere") |
---|
3240 | def _got_html_small_return_to(res): |
---|
3241 | self.failUnless("Literal files are always healthy" in res, res) |
---|
3242 | self.failIf("Not Healthy" in res, res) |
---|
3243 | self.failUnless('<a href="somewhere">Return to file' |
---|
3244 | in res, res) |
---|
3245 | d.addCallback(_got_html_small_return_to) |
---|
3246 | d.addCallback(self.CHECK, "small", "t=check&output=json") |
---|
3247 | def _got_json_small(res): |
---|
3248 | r = simplejson.loads(res) |
---|
3249 | self.failUnlessEqual(r["storage-index"], "") |
---|
3250 | self.failUnless(r["results"]["healthy"]) |
---|
3251 | d.addCallback(_got_json_small) |
---|
3252 | |
---|
3253 | d.addCallback(self.CHECK, "smalldir", "t=check") |
---|
3254 | def _got_html_smalldir(res): |
---|
3255 | self.failUnless("Literal files are always healthy" in res, res) |
---|
3256 | self.failIf("Not Healthy" in res, res) |
---|
3257 | d.addCallback(_got_html_smalldir) |
---|
3258 | d.addCallback(self.CHECK, "smalldir", "t=check&output=json") |
---|
3259 | def _got_json_smalldir(res): |
---|
3260 | r = simplejson.loads(res) |
---|
3261 | self.failUnlessEqual(r["storage-index"], "") |
---|
3262 | self.failUnless(r["results"]["healthy"]) |
---|
3263 | d.addCallback(_got_json_smalldir) |
---|
3264 | |
---|
3265 | d.addCallback(self.CHECK, "sick", "t=check") |
---|
3266 | def _got_html_sick(res): |
---|
3267 | self.failUnless("Not Healthy" in res, res) |
---|
3268 | d.addCallback(_got_html_sick) |
---|
3269 | d.addCallback(self.CHECK, "sick", "t=check&output=json") |
---|
3270 | def _got_json_sick(res): |
---|
3271 | r = simplejson.loads(res) |
---|
3272 | self.failUnlessEqual(r["summary"], |
---|
3273 | "Not Healthy: 9 shares (enc 3-of-10)") |
---|
3274 | self.failIf(r["results"]["healthy"]) |
---|
3275 | self.failIf(r["results"]["needs-rebalancing"]) |
---|
3276 | self.failUnless(r["results"]["recoverable"]) |
---|
3277 | d.addCallback(_got_json_sick) |
---|
3278 | |
---|
3279 | d.addCallback(self.CHECK, "dead", "t=check") |
---|
3280 | def _got_html_dead(res): |
---|
3281 | self.failUnless("Not Healthy" in res, res) |
---|
3282 | d.addCallback(_got_html_dead) |
---|
3283 | d.addCallback(self.CHECK, "dead", "t=check&output=json") |
---|
3284 | def _got_json_dead(res): |
---|
3285 | r = simplejson.loads(res) |
---|
3286 | self.failUnlessEqual(r["summary"], |
---|
3287 | "Not Healthy: 1 shares (enc 3-of-10)") |
---|
3288 | self.failIf(r["results"]["healthy"]) |
---|
3289 | self.failIf(r["results"]["needs-rebalancing"]) |
---|
3290 | self.failIf(r["results"]["recoverable"]) |
---|
3291 | d.addCallback(_got_json_dead) |
---|
3292 | |
---|
3293 | d.addCallback(self.CHECK, "corrupt", "t=check&verify=true") |
---|
3294 | def _got_html_corrupt(res): |
---|
3295 | self.failUnless("Not Healthy! : Unhealthy" in res, res) |
---|
3296 | d.addCallback(_got_html_corrupt) |
---|
3297 | d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json") |
---|
3298 | def _got_json_corrupt(res): |
---|
3299 | r = simplejson.loads(res) |
---|
3300 | self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"], |
---|
3301 | r["summary"]) |
---|
3302 | self.failIf(r["results"]["healthy"]) |
---|
3303 | self.failUnless(r["results"]["recoverable"]) |
---|
3304 | self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9) |
---|
3305 | self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1) |
---|
3306 | d.addCallback(_got_json_corrupt) |
---|
3307 | |
---|
3308 | d.addErrback(self.explain_web_error) |
---|
3309 | return d |
---|
3310 | |
---|
3311 | def test_repair_html(self): |
---|
3312 | self.basedir = "web/Grid/repair_html" |
---|
3313 | self.set_up_grid() |
---|
3314 | c0 = self.g.clients[0] |
---|
3315 | self.uris = {} |
---|
3316 | DATA = "data" * 100 |
---|
3317 | d = c0.upload(upload.Data(DATA, convergence="")) |
---|
3318 | def _stash_uri(ur, which): |
---|
3319 | self.uris[which] = ur.uri |
---|
3320 | d.addCallback(_stash_uri, "good") |
---|
3321 | d.addCallback(lambda ign: |
---|
3322 | c0.upload(upload.Data(DATA+"1", convergence=""))) |
---|
3323 | d.addCallback(_stash_uri, "sick") |
---|
3324 | d.addCallback(lambda ign: |
---|
3325 | c0.upload(upload.Data(DATA+"2", convergence=""))) |
---|
3326 | d.addCallback(_stash_uri, "dead") |
---|
3327 | def _stash_mutable_uri(n, which): |
---|
3328 | self.uris[which] = n.get_uri() |
---|
3329 | assert isinstance(self.uris[which], str) |
---|
3330 | d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3")) |
---|
3331 | d.addCallback(_stash_mutable_uri, "corrupt") |
---|
3332 | |
---|
3333 | def _compute_fileurls(ignored): |
---|
3334 | self.fileurls = {} |
---|
3335 | for which in self.uris: |
---|
3336 | self.fileurls[which] = "uri/" + urllib.quote(self.uris[which]) |
---|
3337 | d.addCallback(_compute_fileurls) |
---|
3338 | |
---|
3339 | def _clobber_shares(ignored): |
---|
3340 | good_shares = self.find_uri_shares(self.uris["good"]) |
---|
3341 | self.failUnlessReallyEqual(len(good_shares), 10) |
---|
3342 | sick_shares = self.find_uri_shares(self.uris["sick"]) |
---|
3343 | os.unlink(sick_shares[0][2]) |
---|
3344 | dead_shares = self.find_uri_shares(self.uris["dead"]) |
---|
3345 | for i in range(1, 10): |
---|
3346 | os.unlink(dead_shares[i][2]) |
---|
3347 | c_shares = self.find_uri_shares(self.uris["corrupt"]) |
---|
3348 | cso = CorruptShareOptions() |
---|
3349 | cso.stdout = StringIO() |
---|
3350 | cso.parseOptions([c_shares[0][2]]) |
---|
3351 | corrupt_share(cso) |
---|
3352 | d.addCallback(_clobber_shares) |
---|
3353 | |
---|
3354 | d.addCallback(self.CHECK, "good", "t=check&repair=true") |
---|
3355 | def _got_html_good(res): |
---|
3356 | self.failUnless("Healthy" in res, res) |
---|
3357 | self.failIf("Not Healthy" in res, res) |
---|
3358 | self.failUnless("No repair necessary" in res, res) |
---|
3359 | d.addCallback(_got_html_good) |
---|
3360 | |
---|
3361 | d.addCallback(self.CHECK, "sick", "t=check&repair=true") |
---|
3362 | def _got_html_sick(res): |
---|
3363 | self.failUnless("Healthy : healthy" in res, res) |
---|
3364 | self.failIf("Not Healthy" in res, res) |
---|
3365 | self.failUnless("Repair successful" in res, res) |
---|
3366 | d.addCallback(_got_html_sick) |
---|
3367 | |
---|
3368 | # repair of a dead file will fail, of course, but it isn't yet |
---|
3369 | # clear how this should be reported. Right now it shows up as |
---|
3370 | # a "410 Gone". |
---|
3371 | # |
---|
3372 | #d.addCallback(self.CHECK, "dead", "t=check&repair=true") |
---|
3373 | #def _got_html_dead(res): |
---|
3374 | # print res |
---|
3375 | # self.failUnless("Healthy : healthy" in res, res) |
---|
3376 | # self.failIf("Not Healthy" in res, res) |
---|
3377 | # self.failUnless("No repair necessary" in res, res) |
---|
3378 | #d.addCallback(_got_html_dead) |
---|
3379 | |
---|
3380 | d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true") |
---|
3381 | def _got_html_corrupt(res): |
---|
3382 | self.failUnless("Healthy : Healthy" in res, res) |
---|
3383 | self.failIf("Not Healthy" in res, res) |
---|
3384 | self.failUnless("Repair successful" in res, res) |
---|
3385 | d.addCallback(_got_html_corrupt) |
---|
3386 | |
---|
3387 | d.addErrback(self.explain_web_error) |
---|
3388 | return d |
---|
3389 | |
---|
3390 | def test_repair_json(self): |
---|
3391 | self.basedir = "web/Grid/repair_json" |
---|
3392 | self.set_up_grid() |
---|
3393 | c0 = self.g.clients[0] |
---|
3394 | self.uris = {} |
---|
3395 | DATA = "data" * 100 |
---|
3396 | d = c0.upload(upload.Data(DATA+"1", convergence="")) |
---|
3397 | def _stash_uri(ur, which): |
---|
3398 | self.uris[which] = ur.uri |
---|
3399 | d.addCallback(_stash_uri, "sick") |
---|
3400 | |
---|
3401 | def _compute_fileurls(ignored): |
---|
3402 | self.fileurls = {} |
---|
3403 | for which in self.uris: |
---|
3404 | self.fileurls[which] = "uri/" + urllib.quote(self.uris[which]) |
---|
3405 | d.addCallback(_compute_fileurls) |
---|
3406 | |
---|
3407 | def _clobber_shares(ignored): |
---|
3408 | sick_shares = self.find_uri_shares(self.uris["sick"]) |
---|
3409 | os.unlink(sick_shares[0][2]) |
---|
3410 | d.addCallback(_clobber_shares) |
---|
3411 | |
---|
3412 | d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json") |
---|
3413 | def _got_json_sick(res): |
---|
3414 | r = simplejson.loads(res) |
---|
3415 | self.failUnlessReallyEqual(r["repair-attempted"], True) |
---|
3416 | self.failUnlessReallyEqual(r["repair-successful"], True) |
---|
3417 | self.failUnlessEqual(r["pre-repair-results"]["summary"], |
---|
3418 | "Not Healthy: 9 shares (enc 3-of-10)") |
---|
3419 | self.failIf(r["pre-repair-results"]["results"]["healthy"]) |
---|
3420 | self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy") |
---|
3421 | self.failUnless(r["post-repair-results"]["results"]["healthy"]) |
---|
3422 | d.addCallback(_got_json_sick) |
---|
3423 | |
---|
3424 | d.addErrback(self.explain_web_error) |
---|
3425 | return d |
---|
3426 | |
---|
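|     # Caps in a format this client does not understand are wrapped in
---|
|     # UnknownNode; these tests check that directory listings, t=json, and
---|
|     # t=info pages render such children without failing, and that read-only
---|
|     # views never expose the unknown write-cap.
---|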
3427 | def test_unknown(self, immutable=False): |
---|
3428 | self.basedir = "web/Grid/unknown" |
---|
3429 | if immutable: |
---|
3430 | self.basedir = "web/Grid/unknown-immutable" |
---|
3431 | |
---|
3432 | self.set_up_grid() |
---|
3433 | c0 = self.g.clients[0] |
---|
3434 | self.uris = {} |
---|
3435 | self.fileurls = {} |
---|
3436 | |
---|
3437 | # the future cap format may contain slashes, which must be tolerated |
---|
3438 | expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap, |
---|
3439 | safe="") |
---|
3440 | |
---|
3441 | if immutable: |
---|
3442 | name = u"future-imm" |
---|
3443 | future_node = UnknownNode(None, unknown_immcap, deep_immutable=True) |
---|
3444 | d = c0.create_immutable_dirnode({name: (future_node, {})}) |
---|
3445 | else: |
---|
3446 | name = u"future" |
---|
3447 | future_node = UnknownNode(unknown_rwcap, unknown_rocap) |
---|
3448 | d = c0.create_dirnode() |
---|
3449 | |
---|
3450 | def _stash_root_and_create_file(n): |
---|
3451 | self.rootnode = n |
---|
3452 | self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/" |
---|
3453 | self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/" |
---|
3454 | if not immutable: |
---|
3455 | return self.rootnode.set_node(name, future_node) |
---|
        d.addCallback(_stash_root_and_create_file)

        # make sure directory listing tolerates unknown nodes
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_directory_html(res, expected_type_suffix):
            pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
                                 '<td>%s</td>' % (expected_type_suffix, str(name)),
                                 re.DOTALL)
            self.failUnless(re.search(pattern, res), res)
            # find the More Info link for name, should be relative
            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
        if immutable:
            d.addCallback(_check_directory_html, "-IMM")
        else:
            d.addCallback(_check_directory_html, "")

        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_directory_json(res, expect_rw_uri):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            f = data[1]["children"][name]
            self.failUnlessEqual(f[0], "unknown")
            if expect_rw_uri:
                self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
            else:
                self.failIfIn("rw_uri", f[1])
            if immutable:
                self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
            else:
                self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
            self.failUnless("metadata" in f[1])
        d.addCallback(_check_directory_json, expect_rw_uri=not immutable)

        def _check_info(res, expect_rw_uri, expect_ro_uri):
            self.failUnlessIn("Object Type: <span>unknown</span>", res)
            if expect_rw_uri:
                self.failUnlessIn(unknown_rwcap, res)
            if expect_ro_uri:
                if immutable:
                    self.failUnlessIn(unknown_immcap, res)
                else:
                    self.failUnlessIn(unknown_rocap, res)
            else:
                self.failIfIn(unknown_rocap, res)
            self.failIfIn("Raw data as", res)
            self.failIfIn("Directory writecap", res)
            self.failIfIn("Checker Operations", res)
            self.failIfIn("Mutable File Operations", res)
            self.failIfIn("Directory Operations", res)

        # FIXME: these should have expect_rw_uri=not immutable; I don't know
        # why they fail. Possibly related to ticket #922.

        d.addCallback(lambda ign: self.GET(expected_info_url))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
        d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)

        def _check_json(res, expect_rw_uri):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "unknown")
            if expect_rw_uri:
                self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
            else:
                self.failIfIn("rw_uri", data[1])

            if immutable:
                self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
                self.failUnlessReallyEqual(data[1]["mutable"], False)
            elif expect_rw_uri:
                self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
                self.failUnlessReallyEqual(data[1]["mutable"], True)
            else:
                self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
                self.failIf("mutable" in data[1], data[1])

            # TODO: check metadata contents
            self.failUnless("metadata" in data[1])

        d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=not immutable)

        # and make sure that a read-only version of the directory can be
        # rendered too. This version will not have unknown_rwcap, whether
        # or not future_node was immutable.
        d.addCallback(lambda ign: self.GET(self.rourl))
        if immutable:
            d.addCallback(_check_directory_html, "-IMM")
        else:
            d.addCallback(_check_directory_html, "-RO")

        d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
        d.addCallback(_check_directory_json, expect_rw_uri=False)

        d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=False)

        # TODO: check that getting t=info from the Info link in the ro directory
        # works, and does not include the writecap URI.
        return d

    def test_immutable_unknown(self):
        return self.test_unknown(immutable=True)

    def test_mutant_dirnodes_are_omitted(self):
        self.basedir = "web/Grid/mutant_dirnodes_are_omitted"

        self.set_up_grid()
        c = self.g.clients[0]
        nm = c.nodemaker
        self.uris = {}
        self.fileurls = {}

        lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
        mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
        mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"

        # This test mainly exercises dirnode, but we'd have to duplicate code
        # in order to test the dirnode and web layers separately.

        # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
        # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
        # When the directory is read, the mutants should be silently disposed of, leaving
        # their lonely sibling.
        # We don't test the case of retrieving a cap from the encrypted rw_uri field,
        # because immutable directories don't have a writecap and therefore that field
        # isn't (and can't be) decrypted.
        # TODO: The field still exists in the netstring. Technically we should check what
        # happens if something is put there (_unpack_contents should raise ValueError),
        # but that can wait.
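
        # (To recap the data model assumed here: an immutable directory may only
        # hold immutable children, so a child whose ro_uri slot carries a writecap,
        # or whose node claims to be mutable, is a "mutant" that the real unpacking
        # code is expected to drop. The is_allowed_in_immutable_directory override
        # below is what lets us smuggle the mutants in at creation time.)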

        lonely_child = nm.create_from_cap(lonely_uri)
        mutant_ro_child = nm.create_from_cap(mut_read_uri)
        mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)

        def _by_hook_or_by_crook():
            return True
        for n in [mutant_ro_child, mutant_write_in_ro_child]:
            n.is_allowed_in_immutable_directory = _by_hook_or_by_crook

        mutant_write_in_ro_child.get_write_uri = lambda: None
        mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri

        kids = {u"lonely": (lonely_child, {}),
                u"ro": (mutant_ro_child, {}),
                u"write-in-ro": (mutant_write_in_ro_child, {}),
                }
        d = c.create_immutable_dirnode(kids)

        def _created(dn):
            self.failUnless(isinstance(dn, dirnode.DirectoryNode))
            self.failIf(dn.is_mutable())
            self.failUnless(dn.is_readonly())
            # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
            self.failIf(hasattr(dn._node, 'get_writekey'))
            rep = str(dn)
            self.failUnless("RO-IMM" in rep)
            cap = dn.get_cap()
            self.failUnlessIn("CHK", cap.to_string())
            self.cap = cap
            self.rootnode = dn
            self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
            return download_to_data(dn._node)
        d.addCallback(_created)

        def _check_data(data):
            # Decode the netstring representation of the directory to check that all children
            # are present. This is a bit of an abstraction violation, but there's not really
            # any other way to do it given that the real DirectoryNode._unpack_contents would
            # strip the mutant children out (which is what we're trying to test, later).
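            # For reference, a sketch of the layout being decoded (see dirnode.py
            # for the authoritative packing code): the directory body is a sequence
            # of netstrings, one per child, and each child entry is itself four
            # netstrings:
            #   netstring(name_utf8) + netstring(ro_uri) +
            #   netstring(encrypted_rwcapdata) + netstring(metadata_json)
            # which is exactly what split_netstring(entry, 4) unpacks below.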
            position = 0
            numkids = 0
            while position < len(data):
                entries, position = split_netstring(data, 1, position)
                entry = entries[0]
                (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
                name = name_utf8.decode("utf-8")
                self.failUnless(rwcapdata == "")
                self.failUnless(name in kids)
                (expected_child, ign) = kids[name]
                self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
                numkids += 1

            self.failUnlessReallyEqual(numkids, 3)
            return self.rootnode.list()
        d.addCallback(_check_data)

        # Now when we use the real directory listing code, the mutants should be absent.
        def _check_kids(children):
            self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
            lonely_node, lonely_metadata = children[u"lonely"]

            self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
            self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
        d.addCallback(_check_kids)

        d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
        d.addCallback(lambda n: n.list())
        d.addCallback(_check_kids) # again with dirnode recreated from cap

        # Make sure the lonely child can be listed in HTML...
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_html(res):
            self.failIfIn("URI:SSK", res)
            get_lonely = "".join([r'<td>FILE</td>',
                                  r'\s+<td>',
                                  r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
                                  r'</td>',
                                  r'\s+<td>%d</td>' % len("one"),
                                  ])
            self.failUnless(re.search(get_lonely, res), res)

            # find the More Info link for name, should be relative
            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
        d.addCallback(_check_html)

        # ... and in JSON.
        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            listed_children = data[1]["children"]
            self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
            ll_type, ll_data = listed_children[u"lonely"]
            self.failUnlessEqual(ll_type, "filenode")
            self.failIf("rw_uri" in ll_data)
            self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
        d.addCallback(_check_json)
        return d

    def test_deep_check(self):
        self.basedir = "web/Grid/deep_check"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        self.fileurls = {}
        DATA = "data" * 100
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.rootnode = n
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
            return fn
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
                                                         convergence="")))
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
                                                         convergence="")))
        d.addCallback(_stash_uri, "sick")

        # this tests that deep-check and stream-manifest will ignore
        # UnknownNode instances. Hopefully this will also cover deep-stats.
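        # (For reference: t=stream-deep-check and t=stream-manifest each emit one
        # JSON object per line, parent-first, followed by a final
        # {"type": "stats", ...} unit; the _done and _check_manifest callbacks
        # below simply re-parse that stream line by line.)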
        future_node = UnknownNode(unknown_rwcap, unknown_rocap)
        d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))

        def _clobber_shares(ignored):
            self.delete_shares_numbered(self.uris["sick"], [0,1])
        d.addCallback(_clobber_shares)

        # root
        # root/good
        # root/small
        # root/sick
        # root/future

        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _done(res):
            try:
                units = [simplejson.loads(line)
                         for line in res.splitlines()
                         if line]
            except ValueError:
                print "response is:", res
                print "undecodeable line was '%s'" % line
                raise
            self.failUnlessReallyEqual(len(units), 5+1)
            # should be parent-first
            u0 = units[0]
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
            u0cr = u0["check-results"]
            self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
            ugoodcr = ugood["check-results"]
            self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)

            stats = units[-1]
            self.failUnlessEqual(stats["type"], "stats")
            s = stats["stats"]
            self.failUnlessReallyEqual(s["count-immutable-files"], 2)
            self.failUnlessReallyEqual(s["count-literal-files"], 1)
            self.failUnlessReallyEqual(s["count-directories"], 1)
            self.failUnlessReallyEqual(s["count-unknown"], 1)
        d.addCallback(_done)

        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_manifest(res):
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessReallyEqual(len(units), 5+1)
            self.failUnlessEqual(units[-1]["type"], "stats")
            first = units[0]
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
            self.failUnlessEqual(first["type"], "directory")
            stats = units[-1]["stats"]
            self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
            self.failUnlessReallyEqual(stats["count-literal-files"], 1)
            self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
            self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
            self.failUnlessReallyEqual(stats["count-unknown"], 1)
        d.addCallback(_check_manifest)

        # now add root/subdir and root/subdir/grandchild, then make subdir
        # unrecoverable, then see what happens

        d.addCallback(lambda ign:
                      self.rootnode.create_subdirectory(u"subdir"))
        d.addCallback(_stash_uri, "subdir")
        d.addCallback(lambda subdir_node:
                      subdir_node.add_file(u"grandchild",
                                           upload.Data(DATA+"2",
                                                       convergence="")))
        d.addCallback(_stash_uri, "grandchild")

        d.addCallback(lambda ign:
                      self.delete_shares_numbered(self.uris["subdir"],
                                                  range(1, 10)))

        # root
        # root/good
        # root/small
        # root/sick
        # root/future
        # root/subdir [unrecoverable]
        # root/subdir/grandchild

        # how should a streaming-JSON API indicate fatal error?
        # answer: emit ERROR: instead of a JSON string
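        # (Roughly, the stream is then expected to look like:
        #    {"path": [], "type": "directory", ...}
        #    ...one JSON unit per reachable node, ending with "subdir"...
        #    ERROR: UnrecoverableFileError(no recoverable versions)
        #    <several lines of traceback>
        #  which is what the two callbacks below pick apart.)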

        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_broken_manifest(res):
            lines = res.splitlines()
            error_lines = [i
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
            if not error_lines:
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
                              error_line)
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessReallyEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
        d.addCallback(_check_broken_manifest)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _check_broken_deepcheck(res):
            lines = res.splitlines()
            error_lines = [i
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
            if not error_lines:
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
                              error_line)
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessReallyEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
            r = last_unit["check-results"]["results"]
            self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
            self.failUnlessReallyEqual(r["count-shares-good"], 1)
            self.failUnlessReallyEqual(r["recoverable"], False)
        d.addCallback(_check_broken_deepcheck)

        d.addErrback(self.explain_web_error)
        return d

    def test_deep_check_and_repair(self):
        self.basedir = "web/Grid/deep_check_and_repair"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        self.fileurls = {}
        DATA = "data" * 100
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.rootnode = n
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
                                                         convergence="")))
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
                                                         convergence="")))
        d.addCallback(_stash_uri, "sick")
        #d.addCallback(lambda ign:
        #              self.rootnode.add_file(u"dead",
        #                                     upload.Data(DATA+"2",
        #                                                 convergence="")))
        #d.addCallback(_stash_uri, "dead")

        #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
        #d.addCallback(_stash_uri, "corrupt")

        def _clobber_shares(ignored):
            good_shares = self.find_uri_shares(self.uris["good"])
            self.failUnlessReallyEqual(len(good_shares), 10)
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            #dead_shares = self.find_uri_shares(self.uris["dead"])
            #for i in range(1, 10):
            #    os.unlink(dead_shares[i][2])

            #c_shares = self.find_uri_shares(self.uris["corrupt"])
            #cso = CorruptShareOptions()
            #cso.stdout = StringIO()
            #cso.parseOptions([c_shares[0][2]])
            #corrupt_share(cso)
        d.addCallback(_clobber_shares)

        # root
        # root/good CHK, 10 shares
        # root/small LIT
        # root/sick CHK, 9 shares
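        # (Note: with the grid's default 3-of-10 encoding, "sick" is still
        # recoverable from its remaining 9 shares, so the repair pass below is
        # expected to succeed and bring it back up to 10 shares.)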

        d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
        def _done(res):
            units = [simplejson.loads(line)
                     for line in res.splitlines()
                     if line]
            self.failUnlessReallyEqual(len(units), 4+1)
            # should be parent-first
            u0 = units[0]
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
            u0crr = u0["check-and-repair-results"]
            self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
            self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
            ugoodcrr = ugood["check-and-repair-results"]
            self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
            self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)

            usick = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"sick"]][0]
            self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
            usickcrr = usick["check-and-repair-results"]
            self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
            self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
            self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
            self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)

            stats = units[-1]
            self.failUnlessEqual(stats["type"], "stats")
            s = stats["stats"]
            self.failUnlessReallyEqual(s["count-immutable-files"], 2)
            self.failUnlessReallyEqual(s["count-literal-files"], 1)
            self.failUnlessReallyEqual(s["count-directories"], 1)
        d.addCallback(_done)

        d.addErrback(self.explain_web_error)
        return d

    def _count_leases(self, ignored, which):
        u = self.uris[which]
        shares = self.find_uri_shares(u)
        lease_counts = []
        for shnum, serverid, fn in shares:
            sf = get_share_file(fn)
            num_leases = len(list(sf.get_leases()))
            lease_counts.append( (fn, num_leases) )
        return lease_counts

    def _assert_leasecount(self, lease_counts, expected):
        for (fn, num_leases) in lease_counts:
            if num_leases != expected:
                self.fail("expected %d leases, have %d, on %s" %
                          (expected, num_leases, fn))

    def test_add_lease(self):
        self.basedir = "web/Grid/add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = "data" * 100
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "two")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
        d.addCallback(_stash_mutable_uri, "mutable")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "one", "t=check") # no add-lease
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses the original client, which uses the same
        # lease-secrets, so it will just renew the original lease
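        # (Lease renewal secrets are derived per-client, which is why a check from
        # the same client merely renews the existing lease, while the clientnum=1
        # checks further down add a genuinely new, second lease.)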
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses an alternate client, which adds a second lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
                      clientnum=1)
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
        return d

    def test_deep_add_lease(self):
        self.basedir = "web/Grid/deep_add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        self.uris = {}
        self.fileurls = {}
        DATA = "data" * 100
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.rootnode = n
            self.uris["root"] = n.get_uri()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"one", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
                                                         convergence="")))
        d.addCallback(_stash_uri, "small")

        d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
        d.addCallback(_stash_uri, "mutable")

        d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
        def _done(res):
            units = [simplejson.loads(line)
                     for line in res.splitlines()
                     if line]
            # root, one, small, mutable, stats
            self.failUnlessReallyEqual(len(units), 4+1)
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
                      clientnum=1)
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
        return d


    def test_exceptions(self):
        self.basedir = "web/Grid/exceptions"
        self.set_up_grid(num_clients=1, num_servers=2)
        c0 = self.g.clients[0]
        c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
        self.fileurls = {}
        DATA = "data" * 100
        d = c0.create_dirnode()
        def _stash_root(n):
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
            return n
        d.addCallback(_stash_root)
        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
        def _stash_bad(ur):
            self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
            self.delete_shares_numbered(ur.uri, range(1,10))

            u = uri.from_string(ur.uri)
            u.key = testutil.flip_bit(u.key, 0)
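            # (flipping a key bit changes the derived storage index, so this URI
            # stays well-formed but points at shares that were never uploaded;
            # fetching it should therefore produce the "0shares" NoSharesError
            # exercised below)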
            baduri = u.to_string()
            self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
        d.addCallback(_stash_bad)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_1share(n):
            u = n.get_uri()
            url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-1share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(1,10))
        d.addCallback(_mangle_dirnode_1share)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_0share(n):
            u = n.get_uri()
            url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-0share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(0,10))
        d.addCallback(_mangle_dirnode_0share)

        # NotEnoughSharesError should be reported sensibly, with a
        # text/plain explanation of the problem, and perhaps some
        # information on which shares *could* be found.
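        # (i.e. a bare GET of an unrecoverable object is expected to yield an HTTP
        # 410 "Gone" with a plain-text body naming the error class, as the
        # shouldHTTPError calls below assert; the unreadable-directory pages, which
        # can still be partially rendered, keep their HTML form instead.)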

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET unrecoverable",
                                           410, "Gone", "NoSharesError",
                                           self.GET, self.fileurls["0shares"]))
        def _check_zero_shares(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NoSharesError: no shares could be found. "
                   "Zero shares usually indicates a corrupt URI, or that "
                   "no servers were connected, but it might also indicate "
                   "severe corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is: "
                   "Failed to get enough shareholders: have 0, need 3")
            self.failUnlessReallyEqual(exp, body)
        d.addCallback(_check_zero_shares)


        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET 1share",
                                           410, "Gone", "NotEnoughSharesError",
                                           self.GET, self.fileurls["1share"]))
        def _check_one_share(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NotEnoughSharesError: This indicates that some "
                   "servers were unavailable, or that shares have been "
                   "lost to server departure, hard drive failure, or disk "
                   "corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is:"
                   " Failed to get enough shareholders: have 1, need 3")
            self.failUnlessReallyEqual(exp, body)
        d.addCallback(_check_one_share)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        def _missing_child(body):
            self.failUnless("No such child: imaginary" in body, body)
        d.addCallback(_missing_child)

        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
        def _check_0shares_dir_html(body):
            self.failUnless("<html>" in body, body)
            # we should see the regular page, but without the child table or
            # the dirops forms
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
                              body)
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_0shares_dir_html)

        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
        def _check_1shares_dir_html(body):
            # at some point, we'll split UnrecoverableFileError into 0-shares
            # and some-shares like we did for immutable files (since there
            # are different sorts of advice to offer in each case). For now,
            # they present the same way.
            self.failUnless("<html>" in body, body)
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
                              body)
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_1shares_dir_html)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-0share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.GET,
                                           self.fileurls["dir-0share-json"]))
        def _check_unrecoverable_file(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessReallyEqual(exp, body)
        d.addCallback(_check_unrecoverable_file)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-1share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.GET,
                                           self.fileurls["dir-1share-json"]))
        d.addCallback(_check_unrecoverable_file)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))

        # attach a webapi child that throws a random error, to test how it
        # gets rendered.
        w = c0.getServiceNamed("webish")
        w.root.putChild("ERRORBOOM", ErrorBoom())

        # "Accept: */*" : should get a text/html stack trace
        # "Accept: text/plain" : should get a text/plain stack trace
        # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
        # no Accept header: should get a text/html stack trace

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["*/*"]}))
        def _internal_error_html1(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html1)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["text/plain"]}))
        def _internal_error_text2(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text2)

        CLI_accepts = "text/plain, application/octet-stream"
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": [CLI_accepts]}))
        def _internal_error_text3(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text3)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM"))
        def _internal_error_html4(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html4)

        def _flush_errors(res):
            # Trial: please ignore the CompletelyUnhandledError in the logs
            self.flushLoggedErrors(CompletelyUnhandledError)
            return res
        d.addBoth(_flush_errors)

        return d

class CompletelyUnhandledError(Exception):
    pass
class ErrorBoom(rend.Page):
    def beforeRender(self, ctx):
        raise CompletelyUnhandledError("whoops")
---|