import json
import os
from collections import OrderedDict
from datetime import datetime, timezone
from io import StringIO
from unittest.mock import Mock

import pytest

from . import rejected_dotdot_paths
from ..crypto.key import PlaintextKey
from ..archive import Archive, CacheChunkBuffer, RobustUnpacker, valid_msgpacked_dict, ITEM_KEYS, Statistics
from ..archive import BackupOSError, backup_io, backup_io_iter, get_item_uid_gid
from ..helpers import msgpack
from ..item import Item, ArchiveItem, ChunkListEntry
from ..manifest import Manifest
from ..platform import uid2user, gid2group, is_win32


@pytest.fixture()
def stats():
stats = Statistics()
stats.update(20, unique=True)
stats.nfiles = 1
return stats
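

# Statistics.update(size, unique) adds `size` to the original size (osize) on
# every call, but to the deduplicated size (usize) only when unique=True.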
def test_stats_basic(stats):
assert stats.osize == 20
assert stats.usize == 20
stats.update(20, unique=False)
assert stats.osize == 40
assert stats.usize == 20
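

# show_progress writes a single status line (sizes, file count, current path),
# pads it to the terminal width taken from COLUMNS, and ends it with "\r" so the
# next update overwrites it in place; over-long paths are shortened with "...".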
@pytest.mark.parametrize(
"item_path, update_size, expected_output",
[
("", 0, "20 B O 20 B U 1 N "), # test unchanged 'stats' fixture
("foo", 10**3, "1.02 kB O 20 B U 1 N foo"), # test updated original size and set item path
# test long item path which exceeds 80 characters
("foo" * 40, 10**3, "1.02 kB O 20 B U 1 N foofoofoofoofoofoofoofoofo...foofoofoofoofoofoofoofoofoofoo"),
],
)
def test_stats_progress(item_path, update_size, expected_output, stats, monkeypatch, columns=80):
monkeypatch.setenv("COLUMNS", str(columns))
out = StringIO()
item = Item(path=item_path) if item_path else None
s = expected_output
stats.update(update_size, unique=False)
stats.show_progress(item=item, stream=out)
buf = " " * (columns - len(s))
assert out.getvalue() == s + buf + "\r"


def test_stats_format(stats):
assert (
str(stats)
== """\
Number of files: 1
Original size: 20 B
Deduplicated size: 20 B
Time spent in hashing: 0.000 seconds
Time spent in chunking: 0.000 seconds
Added files: 0
Unchanged files: 0
Modified files: 0
Error files: 0
Files changed while reading: 0
Bytes read from remote: 0
Bytes sent to remote: 0
"""
)
s = f"{stats.osize_fmt}"
assert s == "20 B"
# kind of redundant, but id is variable so we can't match reliably
assert repr(stats) == f"<Statistics object at {id(stats):#x} (20, 20)>"
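

# with output_json enabled, show_progress emits one JSON object per call; the
# final record signals completion and omits the per-item fields (path, original
# size, nfiles).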
def test_stats_progress_json(stats):
stats.output_json = True
out = StringIO()
stats.show_progress(item=Item(path="foo"), stream=out)
result = json.loads(out.getvalue())
assert result["type"] == "archive_progress"
assert isinstance(result["time"], float)
assert result["finished"] is False
assert result["path"] == "foo"
assert result["original_size"] == 20
assert result["nfiles"] == 1
out = StringIO()
stats.show_progress(stream=out, final=True)
result = json.loads(out.getvalue())
assert result["type"] == "archive_progress"
assert isinstance(result["time"], float)
assert result["finished"] is True # see #6570
assert "path" not in result
assert "original_size" not in result
assert "nfiles" not in result
@pytest.mark.parametrize(
"isoformat, expected",
[
("1970-01-01T00:00:01.000001", datetime(1970, 1, 1, 0, 0, 1, 1, timezone.utc)), # test with microseconds
("1970-01-01T00:00:01", datetime(1970, 1, 1, 0, 0, 1, 0, timezone.utc)), # test without microseconds
],
)
def test_timestamp_parsing(monkeypatch, isoformat, expected):
repository = Mock()
key = PlaintextKey(repository)
manifest = Manifest(key, repository)
a = Archive(manifest, "test", create=True)
a.metadata = ArchiveItem(time=isoformat)
assert a.ts == expected
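

# minimal in-memory stand-in for the real chunk cache: add_chunk stores chunk
# data in a dict keyed by chunk id, fetch_many yields it back in order.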
class MockCache:
    class MockRepo:
        def async_response(self, wait=True):
            pass

    def __init__(self):
        self.objects = {}
        self.repository = self.MockRepo()

    def add_chunk(self, id, meta, data, stats=None, wait=True, ro_type=None):
        assert ro_type is not None
        self.objects[id] = data
        return id, len(data)

    def fetch_many(self, ids, ro_type=None):
        """Mock implementation of fetch_many"""
        for id in ids:
            yield self.objects[id]


def test_cache_chunk_buffer():
data = [Item(path="p1"), Item(path="p2")]
cache = MockCache()
key = PlaintextKey(None)
chunks = CacheChunkBuffer(cache, key, None)
for d in data:
chunks.add(d)
chunks.flush()
chunks.flush(flush=True)
assert len(chunks.chunks) == 2
unpacker = msgpack.Unpacker()
for id in chunks.chunks:
unpacker.feed(cache.objects[id])
assert data == [Item(internal_dict=d) for d in unpacker]
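

# CacheChunkBuffer msgpacks added items into a buffer and cuts it into chunks.
# flush(flush=False) may leave a trailing partial chunk in the buffer;
# flush(flush=True) forces everything out.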
def test_partial_cache_chunk_buffer():
big = "0123456789abcdefghijklmnopqrstuvwxyz" * 25000
data = [Item(path="full", target=big), Item(path="partial", target=big)]
cache = MockCache()
key = PlaintextKey(None)
chunks = CacheChunkBuffer(cache, key, None)
for d in data:
chunks.add(d)
chunks.flush(flush=False)
# the code is expected to leave the last partial chunk in the buffer
assert len(chunks.chunks) == 3
assert chunks.buffer.tell() > 0
# now really flush
chunks.flush(flush=True)
assert len(chunks.chunks) == 4
assert chunks.buffer.tell() == 0
unpacker = msgpack.Unpacker()
for id in chunks.chunks:
unpacker.feed(cache.objects[id])
assert data == [Item(internal_dict=d) for d in unpacker]
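

# helpers for the RobustUnpacker tests: make_chunks packs items into one
# contiguous msgpack byte stream, _validator accepts only the item dicts used
# here, and process feeds a sequence of (should_resync, chunk list) pairs into
# a fresh unpacker, collecting everything it yields.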
def make_chunks(items):
return b"".join(msgpack.packb({"path": item}) for item in items)


def _validator(value):
return isinstance(value, dict) and value.get("path") in ("foo", "bar", "boo", "baz")


def process(input):
unpacker = RobustUnpacker(validator=_validator, item_keys=ITEM_KEYS)
result = []
for should_sync, chunks in input:
if should_sync:
unpacker.resync()
for data in chunks:
unpacker.feed(data)
for item in unpacker:
result.append(item)
return result
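

# without a resync, the unpacker does not skip garbage: the stray bytes show up
# in the output as individual integers (here the byte values of b"garbage").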
def test_extra_garbage_no_sync():
chunks = [(False, [make_chunks(["foo", "bar"])]), (False, [b"garbage"] + [make_chunks(["boo", "baz"])])]
res = process(chunks)
assert res == [{"path": "foo"}, {"path": "bar"}, 103, 97, 114, 98, 97, 103, 101, {"path": "boo"}, {"path": "baz"}]


def split(left, length):
parts = []
while left:
parts.append(left[:length])
left = left[length:]
return parts


def test_correct_stream():
chunks = split(make_chunks(["foo", "bar", "boo", "baz"]), 2)
input = [(False, chunks)]
result = process(input)
assert result == [{"path": "foo"}, {"path": "bar"}, {"path": "boo"}, {"path": "baz"}]
def test_missing_chunk():
chunks = split(make_chunks(["foo", "bar", "boo", "baz"]), 4)
input = [(False, chunks[:3]), (True, chunks[4:])]
result = process(input)
assert result == [{"path": "foo"}, {"path": "boo"}, {"path": "baz"}]


def test_corrupt_chunk():
chunks = split(make_chunks(["foo", "bar", "boo", "baz"]), 4)
input = [(False, chunks[:3]), (True, [b"gar", b"bage"] + chunks[3:])]
result = process(input)
assert result == [{"path": "foo"}, {"path": "boo"}, {"path": "baz"}]


@pytest.fixture
def item_keys_serialized():
return [msgpack.packb(name) for name in ITEM_KEYS]


@pytest.mark.parametrize(
"packed",
[b"", b"x", b"foobar"]
+ [
msgpack.packb(o)
for o in (
[None, 0, 0.0, False, "", {}, [], ()]
+ [42, 23.42, True, b"foobar", {b"foo": b"bar"}, [b"foo", b"bar"], (b"foo", b"bar")]
)
],
)
def test_invalid_msgpacked_item(packed, item_keys_serialized):
assert not valid_msgpacked_dict(packed, item_keys_serialized)


# pytest-xdist requires the keys and dicts to always be in the same order:
IK = sorted(list(ITEM_KEYS))


@pytest.mark.parametrize(
"packed",
[
msgpack.packb(o)
for o in [
{"path": b"/a/b/c"}, # small (different msgpack mapping type!)
OrderedDict((k, b"") for k in IK), # as big (key count) as it gets
OrderedDict((k, b"x" * 1000) for k in IK), # as big (key count and volume) as it gets
]
],
ids=["minimal", "empty-values", "long-values"],
)
def test_valid_msgpacked_items(packed, item_keys_serialized):
assert valid_msgpacked_dict(packed, item_keys_serialized)


def test_key_length_msgpacked_items():
key = "x" * 32 # 31 bytes is the limit for fixstr msgpack type
data = {key: b""}
item_keys_serialized = [msgpack.packb(key)]
assert valid_msgpacked_dict(msgpack.packb(data), item_keys_serialized)
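

# backup_io is a context manager that translates OSError raised inside it into
# BackupOSError; backup_io_iter applies the same translation around each step
# of the wrapped iterator.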
def test_backup_io():
with pytest.raises(BackupOSError):
with backup_io:
raise OSError(123)


def test_backup_io_iter():
    class Iterator:
        def __init__(self, exc):
            self.exc = exc

        def __next__(self):
            raise self.exc()

    oserror_iterator = Iterator(OSError)
    with pytest.raises(BackupOSError):
        for _ in backup_io_iter(oserror_iterator):
            pass

    normal_iterator = Iterator(StopIteration)
    for _ in backup_io_iter(normal_iterator):
        assert False, "StopIteration handled incorrectly"


def test_get_item_uid_gid():
# test requires that:
# - a user/group name for the current process' real uid/gid exists.
# - a system user/group udoesnotexist:gdoesnotexist does NOT exist.

    try:
puid, pgid = os.getuid(), os.getgid() # UNIX only
except AttributeError:
puid, pgid = 0, 0
puser, pgroup = uid2user(puid), gid2group(pgid)

    # this is intentionally a "strange" item, with ids and names that do not match.
item = Item(path="filename", uid=1, gid=2, user=puser, group=pgroup)
uid, gid = get_item_uid_gid(item, numeric=False)
# these are found via a name-to-id lookup
assert uid == puid
assert gid == pgid
uid, gid = get_item_uid_gid(item, numeric=True)
# these are directly taken from the item.uid and .gid
assert uid == 1
assert gid == 2
uid, gid = get_item_uid_gid(item, numeric=False, uid_forced=3, gid_forced=4)
# these are enforced (not from item metadata)
assert uid == 3
assert gid == 4

    # item metadata broken, has negative ids.
item = Item(path="filename", uid=-1, gid=-2, user=puser, group=pgroup)
uid, gid = get_item_uid_gid(item, numeric=True)
# use the uid/gid defaults (which both default to 0).
assert uid == 0
assert gid == 0
uid, gid = get_item_uid_gid(item, numeric=True, uid_default=5, gid_default=6)
# use the uid/gid defaults (as given).
assert uid == 5
assert gid == 6

    # item metadata broken, has negative ids and non-existing user/group names.
item = Item(path="filename", uid=-3, gid=-4, user="udoesnotexist", group="gdoesnotexist")
uid, gid = get_item_uid_gid(item, numeric=False)
# use the uid/gid defaults (which both default to 0).
assert uid == 0
assert gid == 0
uid, gid = get_item_uid_gid(item, numeric=True, uid_default=7, gid_default=8)
# use the uid/gid defaults (as given).
assert uid == 7
assert gid == 8

    if not is_win32:
# due to the hack in borg.platform.windows user2uid / group2gid, these always return 0
# (no matter which username we ask for) and they never raise a KeyError (like e.g. for
# a non-existing user/group name). Thus, these tests can currently not succeed on win32.

        # item metadata has valid uid/gid, but non-existing user/group names.
item = Item(path="filename", uid=9, gid=10, user="udoesnotexist", group="gdoesnotexist")
uid, gid = get_item_uid_gid(item, numeric=False)
# because user/group name does not exist here, use valid numeric ids from item metadata.
assert uid == 9
assert gid == 10
uid, gid = get_item_uid_gid(item, numeric=False, uid_default=11, gid_default=12)
# because item uid/gid seems valid, do not use the given uid/gid defaults
assert uid == 9
assert gid == 10

        # item metadata only has uid/gid, but no user/group.
item = Item(path="filename", uid=13, gid=14)
uid, gid = get_item_uid_gid(item, numeric=False)
# it'll check user/group first, but as there is nothing in the item, falls back to uid/gid.
assert uid == 13
assert gid == 14
uid, gid = get_item_uid_gid(item, numeric=True)
# does not check user/group, directly returns uid/gid.
assert uid == 13
assert gid == 14

        # item metadata has no uid/gid/user/group.
item = Item(path="filename")
uid, gid = get_item_uid_gid(item, numeric=False, uid_default=15)
# as there is nothing, it'll fall back to uid_default/gid_default.
assert uid == 15
assert gid == 0
uid, gid = get_item_uid_gid(item, numeric=True, gid_default=16)
# as there is nothing, it'll fall back to uid_default/gid_default.
assert uid == 0
assert gid == 16
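

# Item validates paths on construction and rejects any path containing a ".."
# element; rejected_dotdot_paths is a shared list of such paths provided by the
# test package.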
def test_reject_non_sanitized_item():
for path in rejected_dotdot_paths:
with pytest.raises(ValueError, match="unexpected '..' element in path"):
Item(path=path, user="root", group="root")


@pytest.fixture
def setup_extractor(tmpdir):
"""Setup common test infrastructure"""
class MockCache:
def __init__(self):
self.objects = {}

    repository = Mock()
key = PlaintextKey(repository)
manifest = Manifest(key, repository)
cache = MockCache()
extractor = Archive(manifest=manifest, name="test", create=True)
extractor.pipeline = cache
extractor.key = key
extractor.cwd = str(tmpdir)
extractor.restore_attrs = Mock()

    # Track fetched chunks across tests
fetched_chunks = []

    def create_mock_chunks(item_data, chunk_size=4):
chunks = []
for i in range(0, len(item_data), chunk_size):
chunk_data = item_data[i : i + chunk_size]
chunk_id = key.id_hash(chunk_data)
chunks.append(ChunkListEntry(id=chunk_id, size=len(chunk_data)))
cache.objects[chunk_id] = chunk_data
item = Mock(spec=["chunks", "size", "__contains__", "get"])
item.chunks = chunks
item.size = len(item_data)
item.__contains__ = lambda self, item: item == "size"
return item, str(tmpdir.join("test.txt"))

    def mock_fetch_many(chunk_ids, ro_type=None):
fetched_chunks.extend(chunk_ids)
return iter([cache.objects[chunk_id] for chunk_id in chunk_ids])

    def clear_fetched_chunks():
fetched_chunks.clear()

    def get_fetched_chunks():
return fetched_chunks

    cache.fetch_many = mock_fetch_many
return extractor, key, cache, tmpdir, create_mock_chunks, get_fetched_chunks, clear_fetched_chunks
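

# each case gives the file content in the archive, the content on disk, and how
# many chunks compare_and_extract_chunks is expected to fetch via fetch_many
# (chunks are 4 bytes each, see create_mock_chunks).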
@pytest.mark.parametrize(
"name, item_data, fs_data, expected_fetched_chunks",
[
(
"no_changes",
b"1111", # One complete chunk, no changes needed
b"1111", # Identical content
0, # No chunks should be fetched
),
(
"single_chunk_change",
b"11112222", # Two chunks
b"1111XXXX", # Second chunk different
1, # Only second chunk should be fetched
),
(
"cross_boundary_change",
b"11112222", # Two chunks
b"111XX22", # Change crosses chunk boundary
2, # Both chunks need update
),
(
"exact_multiple_chunks",
b"11112222333", # Three chunks (last one partial)
b"1111XXXX333", # Middle chunk different
1, # Only middle chunk fetched
),
(
"first_chunk_change",
b"11112222", # Two chunks
b"XXXX2222", # First chunk different
1, # Only first chunk should be fetched
),
(
"all_chunks_different",
b"11112222", # Two chunks
b"XXXXYYYY", # Both chunks different
2, # Both chunks should be fetched
),
(
"partial_last_chunk",
b"111122", # One full chunk + partial
b"1111XX", # Partial chunk different
1, # Only second chunk should be fetched
),
(
"fs_file_shorter",
b"11112222", # Two chunks in archive
b"111122", # Shorter on disk - missing part of second chunk
1, # Should fetch second chunk
),
(
"fs_file_longer",
b"11112222", # Two chunks in archive
b"1111222233", # Longer on disk
0, # Should fetch no chunks since content matches up to archive length
),
(
"empty_archive_file",
b"", # Empty in archive
b"11112222", # Content on disk
0, # No chunks to compare = no chunks to fetch
),
(
"empty_fs_file",
b"11112222", # Two chunks in archive
b"", # Empty on disk
2, # Should fetch all chunks since file is empty
),
],
)
def test_compare_and_extract_chunks(setup_extractor, name, item_data, fs_data, expected_fetched_chunks):
"""Test chunk comparison and extraction"""
extractor, key, cache, tmpdir, create_mock_chunks, get_fetched_chunks, clear_fetched_chunks = setup_extractor
clear_fetched_chunks()
chunk_size = 4
item, target_path = create_mock_chunks(item_data, chunk_size=chunk_size)
original_chunk_ids = [chunk.id for chunk in item.chunks]
with open(target_path, "wb") as f:
f.write(fs_data)
st = os.stat(target_path)
result = extractor.compare_and_extract_chunks(item, target_path, st=st)
assert result
fetched_chunks = get_fetched_chunks()
assert len(fetched_chunks) == expected_fetched_chunks
# For single chunk changes, verify it's the correct chunk
if expected_fetched_chunks == 1:
item_chunks = [item_data[i : i + chunk_size] for i in range(0, len(item_data), chunk_size)]
fs_chunks = [fs_data[i : i + chunk_size] for i in range(0, len(fs_data), chunk_size)]
# Find which chunk should have changed by comparing item_data with fs_data
for i, (item_chunk, fs_chunk) in enumerate(zip(item_chunks, fs_chunks)):
if item_chunk != fs_chunk:
assert fetched_chunks[0] == original_chunk_ids[i]
break
with open(target_path, "rb") as f:
assert f.read() == item_data