1 | """ |
---|
2 | Ported to Python 3. |
---|
3 | """ |
---|
4 | |
---|
5 | import os.path |
---|
6 | from io import StringIO |
---|
7 | from datetime import timedelta |
---|
8 | import re |
---|
9 | |
---|
10 | from twisted.trial import unittest |
---|
11 | from twisted.python.monkey import MonkeyPatcher |
---|
12 | |
---|
13 | from allmydata.util import fileutil |
---|
14 | from allmydata.util.fileutil import abspath_expanduser_unicode |
---|
15 | from allmydata.util.encodingutil import unicode_to_argv |
---|
16 | from allmydata.util.namespace import Namespace |
---|
17 | from allmydata.scripts import cli, backupdb |
---|
18 | from ..common_util import StallMixin |
---|
19 | from ..no_network import GridTestMixin |
---|
20 | from .common import ( |
---|
21 | CLITestMixin, |
---|
22 | parse_options, |
---|
23 | ) |
---|
24 | |
---|
25 | |
---|
26 | def _unsupported(what): |
---|
27 | return "{} are not supported by Python on this platform.".format(what) |
---|
28 | |
---|
29 | |
---|
class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
    """
    Tests for the ``tahoe backup`` CLI command, run against an in-process
    no-network grid.
    """

    def writeto(self, path, data):
        """
        Write ``data`` to the file at ``path``, relative to the ``home``
        directory under ``self.basedir``, creating any missing parent
        directories first.
        """
        full_path = os.path.join(self.basedir, "home", path)
        fileutil.make_dirs(os.path.dirname(full_path))
        fileutil.write(full_path, data)
---|
36 | |
---|
37 | def count_output(self, out): |
---|
38 | mo = re.search(r"(\d)+ files uploaded \((\d+) reused\), " |
---|
39 | "(\d)+ files skipped, " |
---|
40 | "(\d+) directories created \((\d+) reused\), " |
---|
41 | "(\d+) directories skipped", out) |
---|
42 | return [int(s) for s in mo.groups()] |
---|
43 | |
---|
44 | def count_output2(self, out): |
---|
45 | mo = re.search(r"(\d)+ files checked, (\d+) directories checked", out) |
---|
46 | return [int(s) for s in mo.groups()] |
---|
47 | |
---|
48 | def progress_output(self, out): |
---|
49 | def parse_timedelta(h, m, s): |
---|
50 | return timedelta(int(h), int(m), int(s)) |
---|
51 | mos = re.findall( |
---|
52 | r"Backing up (\d)+/(\d)+\.\.\. (\d+)h (\d+)m (\d+)s elapsed\.\.\.", |
---|
53 | out, |
---|
54 | ) |
---|
55 | return list( |
---|
56 | (int(progress), int(total), parse_timedelta(h, m, s)) |
---|
57 | for (progress, total, h, m, s) |
---|
58 | in mos |
---|
59 | ) |
---|
60 | |
---|
    def test_backup(self):
        """
        Run ``tahoe backup`` several times over a small local tree, checking
        the uploaded/reused/skipped counters, the verbose progress reports,
        the ``Latest`` and ``Archives`` directory contents, and that earlier
        snapshots are left untouched by later backups.
        """
        self.basedir = "cli/Backup/backup"
        self.set_up_grid(oneshare=True)

        # is the backupdb available? If so, we test that a second backup does
        # not create new directories.
        hush = StringIO()
        bdb = backupdb.get_backupdb(os.path.join(self.basedir, "dbtest"),
                                    hush)
        self.failUnless(bdb)

        # create a small local directory with a couple of files
        source = os.path.join(self.basedir, "home")
        fileutil.make_dirs(os.path.join(source, "empty"))
        self.writeto("parent/subdir/foo.txt", "foo")
        self.writeto("parent/subdir/bar.txt", "bar\n" * 1000)
        self.writeto("parent/blah.txt", "blah")

        def do_backup(verbose=False):
            # Invoke "tahoe backup [--verbose] <source> tahoe:backups".
            cmd = ["backup"]
            if verbose:
                cmd.append("--verbose")
            cmd.append(source)
            cmd.append("tahoe:backups")
            return self.do_cli(*cmd)

        d = self.do_cli("create-alias", "tahoe")

        d.addCallback(lambda res: do_backup(True))
        def _check0(args):
            # First backup: everything is new, so all files are uploaded and
            # all directories created; nothing is reused, skipped, or checked.
            (rc, out, err) = args
            self.assertEqual(len(err), 0, err)
            self.failUnlessReallyEqual(rc, 0)
            (
                files_uploaded,
                files_reused,
                files_skipped,
                directories_created,
                directories_reused,
                directories_skipped,
            ) = self.count_output(out)
            # foo.txt, bar.txt, blah.txt
            self.failUnlessReallyEqual(files_uploaded, 3)
            self.failUnlessReallyEqual(files_reused, 0)
            self.failUnlessReallyEqual(files_skipped, 0)
            # empty, home, home/parent, home/parent/subdir
            self.failUnlessReallyEqual(directories_created, 4)
            self.failUnlessReallyEqual(directories_reused, 0)
            self.failUnlessReallyEqual(directories_skipped, 0)

            # This is the first-upload scenario so there should have been
            # nothing to check.
            (files_checked, directories_checked) = self.count_output2(out)
            self.failUnlessReallyEqual(files_checked, 0)
            self.failUnlessReallyEqual(directories_checked, 0)

            progress = self.progress_output(out)
            for left, right in zip(progress[:-1], progress[1:]):
                # Progress as measured by file count should progress
                # monotonically.
                self.assertTrue(
                    left[0] < right[0],
                    "Failed: {} < {}".format(left[0], right[0]),
                )

                # Total work to do should remain the same.
                self.assertEqual(left[1], right[1])

                # Amount of elapsed time should only go up. Allow it to
                # remain the same to account for resolution of the report.
                self.assertTrue(
                    left[2] <= right[2],
                    "Failed: {} <= {}".format(left[2], right[2]),
                )

            for element in progress:
                # Can't have more progress than the total.
                self.assertTrue(
                    element[0] <= element[1],
                    "Failed: {} <= {}".format(element[0], element[1]),
                )


        d.addCallback(_check0)

        d.addCallback(lambda res: self.do_cli("ls", "--uri", "tahoe:backups"))
        def _check1(args):
            # The backup root should contain exactly "Archives" and "Latest",
            # and "Latest" should be an immutable (CHK) directory.
            (rc, out, err) = args
            self.assertEqual(len(err), 0, err)
            self.failUnlessReallyEqual(rc, 0)
            lines = out.split("\n")
            children = dict([line.split() for line in lines if line])
            latest_uri = children["Latest"]
            self.failUnless(latest_uri.startswith("URI:DIR2-CHK:"), latest_uri)
            childnames = list(children.keys())
            self.failUnlessReallyEqual(sorted(childnames), ["Archives", "Latest"])
        d.addCallback(_check1)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
        def _check2(args):
            (rc, out, err) = args
            self.assertEqual(len(err), 0, err)
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(sorted(out.split()), ["empty", "parent"])
        d.addCallback(_check2)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
        def _check2a(args):
            # The backed-up "empty" directory should itself be empty.
            (rc, out, err) = args
            self.assertEqual(len(err), 0, err)
            self.failUnlessReallyEqual(rc, 0)
            self.assertFalse(out.strip())
        d.addCallback(_check2a)
        d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
        def _check3(args):
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            self.assertEqual(out, "foo")
        d.addCallback(_check3)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
        def _check4(args):
            # Exactly one snapshot so far; remember it for later comparison.
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            self.old_archives = out.split()
            self.failUnlessReallyEqual(len(self.old_archives), 1)
        d.addCallback(_check4)


        # Stall so the second snapshot gets a distinct timestamp-based name.
        d.addCallback(self.stall, 1.1)
        d.addCallback(lambda res: do_backup())
        def _check4a(args):
            # second backup should reuse everything, if the backupdb is
            # available
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            fu, fr, fs, dc, dr, ds = self.count_output(out)
            # foo.txt, bar.txt, blah.txt
            self.failUnlessReallyEqual(fu, 0)
            self.failUnlessReallyEqual(fr, 3)
            self.failUnlessReallyEqual(fs, 0)
            # empty, home, home/parent, home/parent/subdir
            self.failUnlessReallyEqual(dc, 0)
            self.failUnlessReallyEqual(dr, 4)
            self.failUnlessReallyEqual(ds, 0)
        d.addCallback(_check4a)

        # sneak into the backupdb, crank back the "last checked"
        # timestamp to force a check on all files
        def _reset_last_checked(res):
            dbfile = self.get_client_config().get_private_path("backupdb.sqlite")
            self.failUnless(os.path.exists(dbfile), dbfile)
            bdb = backupdb.get_backupdb(dbfile)
            bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
            bdb.cursor.execute("UPDATE directories SET last_checked=0")
            bdb.connection.commit()

        d.addCallback(_reset_last_checked)

        d.addCallback(self.stall, 1.1)
        d.addCallback(lambda res: do_backup(verbose=True))
        def _check4b(args):
            # we should check all files, and re-use all of them. None of
            # the directories should have been changed, so we should
            # re-use all of them too.
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            fu, fr, fs, dc, dr, ds = self.count_output(out)
            fchecked, dchecked = self.count_output2(out)
            self.failUnlessReallyEqual(fchecked, 3)
            self.failUnlessReallyEqual(fu, 0)
            self.failUnlessReallyEqual(fr, 3)
            self.failUnlessReallyEqual(fs, 0)
            self.failUnlessReallyEqual(dchecked, 4)
            self.failUnlessReallyEqual(dc, 0)
            self.failUnlessReallyEqual(dr, 4)
            self.failUnlessReallyEqual(ds, 0)
        d.addCallback(_check4b)

        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
        def _check5(args):
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            self.new_archives = out.split()
            self.failUnlessReallyEqual(len(self.new_archives), 3, out)
            # the original backup should still be the oldest (i.e. sorts
            # alphabetically towards the beginning)
            self.failUnlessReallyEqual(sorted(self.new_archives)[0],
                                       self.old_archives[0])
        d.addCallback(_check5)

        d.addCallback(self.stall, 1.1)
        def _modify(res):
            # Mutate the source tree in the tricky ways: change a file's
            # contents, replace a file with a directory, and replace a
            # directory with a file, then back up again.
            self.writeto("parent/subdir/foo.txt", "FOOF!")
            # and turn a file into a directory
            os.unlink(os.path.join(source, "parent/blah.txt"))
            os.mkdir(os.path.join(source, "parent/blah.txt"))
            self.writeto("parent/blah.txt/surprise file", "surprise")
            self.writeto("parent/blah.txt/surprisedir/subfile", "surprise")
            # turn a directory into a file
            os.rmdir(os.path.join(source, "empty"))
            self.writeto("empty", "imagine nothing being here")
            return do_backup()
        d.addCallback(_modify)
        def _check5a(args):
            # second backup should reuse bar.txt (if backupdb is available),
            # and upload the rest. None of the directories can be reused.
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            fu, fr, fs, dc, dr, ds = self.count_output(out)
            # new foo.txt, surprise file, subfile, empty
            self.failUnlessReallyEqual(fu, 4)
            # old bar.txt
            self.failUnlessReallyEqual(fr, 1)
            self.failUnlessReallyEqual(fs, 0)
            # home, parent, subdir, blah.txt, surprisedir
            self.failUnlessReallyEqual(dc, 5)
            self.failUnlessReallyEqual(dr, 0)
            self.failUnlessReallyEqual(ds, 0)
        d.addCallback(_check5a)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
        def _check6(args):
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            self.new_archives = out.split()
            self.failUnlessReallyEqual(len(self.new_archives), 4)
            self.failUnlessReallyEqual(sorted(self.new_archives)[0],
                                       self.old_archives[0])
        d.addCallback(_check6)
        d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
        def _check7(args):
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            self.assertEqual(out, "FOOF!")
            # the old snapshot should not be modified
            return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
        d.addCallback(_check7)
        def _check8(args):
            (rc, out, err) = args
            self.assertFalse(err)
            self.failUnlessReallyEqual(rc, 0)
            self.assertEqual(out, "foo")
        d.addCallback(_check8)

        return d
---|
311 | |
---|
312 | def _check_filtering(self, filtered, all, included, excluded): |
---|
313 | filtered = set(filtered) |
---|
314 | all = set(all) |
---|
315 | included = set(included) |
---|
316 | excluded = set(excluded) |
---|
317 | self.failUnlessReallyEqual(filtered, included) |
---|
318 | self.failUnlessReallyEqual(all.difference(filtered), excluded) |
---|
319 | |
---|
    def test_exclude_options(self):
        """
        Exercise the ``--exclude``, ``--exclude-vcs``, and
        ``--exclude-from-utf-8`` options of ``tahoe backup`` by checking
        which directory entries ``filter_listdir`` lets through.
        """
        root_listdir = (u'lib.a', u'_darcs', u'subdir', u'nice_doc.lyx')
        subdir_listdir = (u'another_doc.lyx', u'run_snake_run.py', u'CVS', u'.svn', u'_darcs')
        basedir = "cli/Backup/exclude_options"
        fileutil.make_dirs(basedir)
        nodeurl_path = os.path.join(basedir, 'node.url')
        fileutil.write(nodeurl_path, 'http://example.net:2357/')
        def parse(args): return parse_options(basedir, "backup", args)

        # test simple exclude
        backup_options = parse(['--exclude', '*lyx', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(root_listdir))
        self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
                              (u'nice_doc.lyx',))
        # multiple exclude
        backup_options = parse(['--exclude', '*lyx', '--exclude', 'lib.?', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(root_listdir))
        self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
                              (u'nice_doc.lyx', u'lib.a'))
        # vcs metadata exclusion
        backup_options = parse(['--exclude-vcs', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(subdir_listdir))
        self._check_filtering(filtered, subdir_listdir, (u'another_doc.lyx', u'run_snake_run.py',),
                              (u'CVS', u'.svn', u'_darcs'))
        # read exclude patterns from file
        exclusion_string = "_darcs\n*py\n.svn"
        excl_filepath = os.path.join(basedir, 'exclusion')
        fileutil.write(excl_filepath, exclusion_string)
        backup_options = parse(['--exclude-from-utf-8', excl_filepath, 'from', 'to'])
        filtered = list(backup_options.filter_listdir(subdir_listdir))
        self._check_filtering(filtered, subdir_listdir, (u'another_doc.lyx', u'CVS'),
                              (u'.svn', u'_darcs', u'run_snake_run.py'))
        # test BackupConfigurationError (raised when the exclusion file is
        # missing)
        self.failUnlessRaises(cli.BackupConfigurationError,
                              parse,
                              ['--exclude-from-utf-8', excl_filepath + '.no', 'from', 'to'])

        # test that an iterator works too
        backup_options = parse(['--exclude', '*lyx', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(iter(root_listdir)))
        self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
                              (u'nice_doc.lyx',))
---|
362 | |
---|
    def test_exclude_options_unicode(self):
        """
        Like ``test_exclude_options``, but with a non-ASCII filename and a
        non-ASCII exclusion pattern.
        """
        nice_doc = u"nice_d\u00F8c.lyx"
        try:
            # NOTE(review): on Python 3 this plain assignment cannot raise
            # UnicodeEncodeError; the try/except looks like a leftover from
            # the Python 2 argv-encoding port — confirm before removing.
            doc_pattern_arg_unicode = doc_pattern_arg = u"*d\u00F8c*"
        except UnicodeEncodeError:
            raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")

        root_listdir = (u'lib.a', u'_darcs', u'subdir', nice_doc)
        basedir = "cli/Backup/exclude_options_unicode"
        fileutil.make_dirs(basedir)
        nodeurl_path = os.path.join(basedir, 'node.url')
        fileutil.write(nodeurl_path, 'http://example.net:2357/')
        def parse(args): return parse_options(basedir, "backup", args)

        # test simple exclude
        backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
        filtered = list(backup_options.filter_listdir(root_listdir))
        self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
                              (nice_doc,))
        # multiple exclude
        backup_options = parse(['--exclude', doc_pattern_arg, '--exclude', 'lib.?', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(root_listdir))
        self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
                              (nice_doc, u'lib.a'))
        # read exclude patterns from file
        exclusion_string = (doc_pattern_arg_unicode + "\nlib.?").encode("utf-8")
        excl_filepath = os.path.join(basedir, 'exclusion')
        fileutil.write(excl_filepath, exclusion_string)
        backup_options = parse(['--exclude-from-utf-8', excl_filepath, 'from', 'to'])
        filtered = list(backup_options.filter_listdir(root_listdir))
        self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
                              (nice_doc, u'lib.a'))

        # test that an iterator works too
        backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
        filtered = list(backup_options.filter_listdir(iter(root_listdir)))
        self._check_filtering(filtered, root_listdir, (u'lib.a', u'_darcs', u'subdir'),
                              (nice_doc,))
---|
401 | |
---|
    def test_exclude_from_tilde_expansion(self):
        """
        ``--exclude-from-utf-8`` expands a leading ``~`` in its argument
        before the exclusion file is opened.
        """
        basedir = "cli/Backup/exclude_from_tilde_expansion"
        fileutil.make_dirs(basedir)
        nodeurl_path = os.path.join(basedir, 'node.url')
        fileutil.write(nodeurl_path, 'http://example.net:2357/')

        # ensure that tilde expansion is performed on exclude-from argument
        exclude_file = u'~/.tahoe/excludes.dummy'

        ns = Namespace()
        ns.called = False
        original_open = open
        def call_file(name, *args, **kwargs):
            # Intercept the open() of the exclusion file and verify the path
            # was tilde-expanded; pass every other open() call through
            # untouched.
            if name.endswith("excludes.dummy"):
                ns.called = True
                self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
                return StringIO()
            else:
                return original_open(name, *args, **kwargs)

        # Patch builtins.open only for the duration of the option parsing.
        import builtins as module_to_patch
        patcher = MonkeyPatcher((module_to_patch, 'open', call_file))
        patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to'])
        self.failUnless(ns.called)
---|
426 | |
---|
    def test_ignore_symlinks(self):
        """
        A symlink encountered in the backed-up directory is skipped with a
        warning.
        """
        if not hasattr(os, 'symlink'):
            raise unittest.SkipTest(_unsupported("Symlinks"))

        def make_symlink(path):
            # Create a real file plus a symlink pointing at it; the symlink
            # is the entry the backup is expected to skip.
            self.writeto("foo.txt", "foo")
            os.symlink(
                os.path.join(
                    os.path.dirname(path),
                    "foo.txt",
                ),
                path,
            )

        return self._ignore_something_test(u"Symlink", make_symlink)
---|
446 | |
---|
    def test_ignore_fifo(self):
        """
        A FIFO encountered in the backed-up directory is skipped with a warning.
        """
        if getattr(os, "mkfifo", None) is None:
            raise unittest.SkipTest(_unsupported("FIFOs"))

        def make_fifo(path):
            # Create the thing to ignore
            os.makedirs(os.path.dirname(path))
            os.mkfifo(path)
            # Also create another thing so the counts end up the same as
            # those in the symlink test and it's easier to re-use the testing
            # helper.
            self.writeto("count-dummy.txt", "foo")

        return self._ignore_something_test(u"special", make_fifo)
---|
464 | |
---|
    def _ignore_something_test(self, kind_of_thing, make_something_to_ignore):
        """
        Assert that when a certain kind of file is encountered in the backed-up
        directory a warning that it is not supported is emitted and the backup
        proceeds to other files with no other error.

        :param unicode kind_of_thing: The name of the kind of file that will
            be ignored.  This is expected to appear in the warning.

        :param make_something_to_ignore: A one-argument callable which creates
            the file that is expected to be ignored.  It is called with the
            path at which the file must be created.

        :return Deferred: A ``Deferred`` that fires when the assertion has
            been made.
        """
        self.basedir = os.path.dirname(self.mktemp())
        self.set_up_grid(oneshare=True)

        source = os.path.join(self.basedir, "home")
        ignored_path = os.path.join(source, "foo2.txt")
        make_something_to_ignore(ignored_path)

        d = self.do_cli("create-alias", "tahoe")
        d.addCallback(lambda res: self.do_cli("backup", "--verbose", source, "tahoe:test"))

        def _check(args):
            # rc 2 means the backup completed but some entries were skipped.
            (rc, out, err) = args
            self.failUnlessReallyEqual(rc, 2)
            self.assertIn(
                "WARNING: cannot backup {} ".format(kind_of_thing.lower()),
                err,
            )
            self.assertIn(ignored_path, err)

            fu, fr, fs, dc, dr, ds = self.count_output(out)
            # foo.txt
            self.failUnlessReallyEqual(fu, 1)
            self.failUnlessReallyEqual(fr, 0)
            # foo2.txt
            self.failUnlessReallyEqual(fs, 1)
            # home
            self.failUnlessReallyEqual(dc, 1)
            self.failUnlessReallyEqual(dr, 0)
            self.failUnlessReallyEqual(ds, 0)

        d.addCallback(_check)
        return d
---|
513 | |
---|
514 | def test_ignore_unreadable_file(self): |
---|
515 | self.basedir = os.path.dirname(self.mktemp()) |
---|
516 | self.set_up_grid(oneshare=True) |
---|
517 | |
---|
518 | source = os.path.join(self.basedir, "home") |
---|
519 | self.writeto("foo.txt", "foo") |
---|
520 | os.chmod(os.path.join(source, "foo.txt"), 0000) |
---|
521 | |
---|
522 | d = self.do_cli("create-alias", "tahoe") |
---|
523 | d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test")) |
---|
524 | |
---|
525 | def _check(args): |
---|
526 | (rc, out, err) = args |
---|
527 | self.failUnlessReallyEqual(rc, 2) |
---|
528 | self.failUnlessReallyEqual(err, "WARNING: permission denied on file %s\n" % os.path.join(source, "foo.txt")) |
---|
529 | |
---|
530 | fu, fr, fs, dc, dr, ds = self.count_output(out) |
---|
531 | self.failUnlessReallyEqual(fu, 0) |
---|
532 | self.failUnlessReallyEqual(fr, 0) |
---|
533 | # foo.txt |
---|
534 | self.failUnlessReallyEqual(fs, 1) |
---|
535 | # home |
---|
536 | self.failUnlessReallyEqual(dc, 1) |
---|
537 | self.failUnlessReallyEqual(dr, 0) |
---|
538 | self.failUnlessReallyEqual(ds, 0) |
---|
539 | d.addCallback(_check) |
---|
540 | |
---|
541 | # This is necessary for the temp files to be correctly removed |
---|
542 | def _cleanup(self): |
---|
543 | os.chmod(os.path.join(source, "foo.txt"), 0o644) |
---|
544 | d.addCallback(_cleanup) |
---|
545 | d.addErrback(_cleanup) |
---|
546 | |
---|
547 | return d |
---|
548 | |
---|
549 | def test_ignore_unreadable_directory(self): |
---|
550 | self.basedir = os.path.dirname(self.mktemp()) |
---|
551 | self.set_up_grid(oneshare=True) |
---|
552 | |
---|
553 | source = os.path.join(self.basedir, "home") |
---|
554 | os.mkdir(source) |
---|
555 | os.mkdir(os.path.join(source, "test")) |
---|
556 | os.chmod(os.path.join(source, "test"), 0000) |
---|
557 | |
---|
558 | d = self.do_cli("create-alias", "tahoe") |
---|
559 | d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test")) |
---|
560 | |
---|
561 | def _check(args): |
---|
562 | (rc, out, err) = args |
---|
563 | self.failUnlessReallyEqual(rc, 2) |
---|
564 | self.failUnlessReallyEqual(err, "WARNING: permission denied on directory %s\n" % os.path.join(source, "test")) |
---|
565 | |
---|
566 | fu, fr, fs, dc, dr, ds = self.count_output(out) |
---|
567 | self.failUnlessReallyEqual(fu, 0) |
---|
568 | self.failUnlessReallyEqual(fr, 0) |
---|
569 | self.failUnlessReallyEqual(fs, 0) |
---|
570 | # home, test |
---|
571 | self.failUnlessReallyEqual(dc, 2) |
---|
572 | self.failUnlessReallyEqual(dr, 0) |
---|
573 | # test |
---|
574 | self.failUnlessReallyEqual(ds, 1) |
---|
575 | d.addCallback(_check) |
---|
576 | |
---|
577 | # This is necessary for the temp files to be correctly removed |
---|
578 | def _cleanup(self): |
---|
579 | os.chmod(os.path.join(source, "test"), 0o655) |
---|
580 | d.addCallback(_cleanup) |
---|
581 | d.addErrback(_cleanup) |
---|
582 | return d |
---|
583 | |
---|
    def test_backup_without_alias(self):
        # 'tahoe backup' should output a sensible error message when invoked
        # without an alias instead of a stack trace.
        self.basedir = os.path.dirname(self.mktemp())
        self.set_up_grid(oneshare=True)
        source = os.path.join(self.basedir, "file1")
        d = self.do_cli('backup', source, source)
        def _check(args):
            # Expect exit code 1, an "error:" message on stderr, and an
            # empty stdout.
            (rc, out, err) = args
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessIn("error:", err)
            self.assertEqual(len(out), 0)
        d.addCallback(_check)
        return d
---|
598 | |
---|
    def test_backup_with_nonexistent_alias(self):
        # 'tahoe backup' should output a sensible error message when invoked
        # with a nonexistent alias.
        self.basedir = os.path.dirname(self.mktemp())
        self.set_up_grid(oneshare=True)
        source = os.path.join(self.basedir, "file1")
        d = self.do_cli("backup", source, "nonexistent:" + source)
        def _check(args):
            # Expect exit code 1 and an error message that names the
            # unknown alias; stdout stays empty.
            (rc, out, err) = args
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessIn("error:", err)
            self.failUnlessIn("nonexistent", err)
            self.assertEqual(len(out), 0)
        d.addCallback(_check)
        return d
---|