# SPDX-License-Identifier: CC0-1.0

# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
#
# Available under the terms of Creative Commons Zero v1.0 Universal.

# Enable use with Python 3.7.
from __future__ import annotations

import pytest
import json
import shutil
import functools as ft

from tempfile import TemporaryDirectory
from pathlib import Path, PurePosixPath
from hashlib import sha256
from zipfile import ZipFile
from contextlib import contextmanager

from jsonschema import ValidationError

from hydrilla import util as hydrilla_util
from hydrilla.util._util import _schema_name_re
from hydrilla.builder import build, _version, local_apt
from hydrilla.builder.common_errors import *

from .helpers import *

here = Path(__file__).resolve().parent

expected_generated_by = {
    'name': 'hydrilla.builder',
    'version': _version.version
}

orig_srcdir = here / 'source-package-example'

index_obj, _ = hydrilla_util.load_instance_from_file(orig_srcdir / 'index.json')

def read_files(*file_list):
    """
    Take names of files under srcdir and return a dict that maps them to their
    contents (as bytes).
    """
    return dict((name, (orig_srcdir / name).read_bytes()) for name in file_list)

dist_files = {
    **read_files('LICENSES/CC0-1.0.txt', 'bye.js', 'hello.js', 'message.js'),
    'report.spdx': b'dummy spdx output'
}
src_files = {
    **dist_files,
    **read_files('README.txt', 'README.txt.license', '.reuse/dep5',
                 'index.json')
}
extra_archive_files = {
}

sha256_hashes = dict((name, sha256(contents).digest().hex())
                     for name, contents in src_files.items())

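# report.spdx is a build product (the mocked 'reuse spdx' run supplies its
# contents), so it belongs among the distribution files but not in the source
# tree.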
del src_files['report.spdx']

expected_source_copyright = [{
    'file': 'report.spdx',
    'sha256': sha256_hashes['report.spdx']
}, {
    'file': 'LICENSES/CC0-1.0.txt',
    'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
}]

expected_resources = [{
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'type': 'resource',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
    'version': [2021, 11, 10],
    'revision': 1,
    'description': 'greets an apple',
    'dependencies': [{'identifier': 'hello-message'}],
    'scripts': [{
        'file': 'hello.js',
        'sha256': sha256_hashes['hello.js']
    }, {
        'file': 'bye.js',
        'sha256': sha256_hashes['bye.js']
    }],
    'generated_by': expected_generated_by
}, {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'type': 'resource',
    'identifier': 'hello-message',
    'long_name': 'Hello Message',
    'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
    'version': [2021, 11, 10],
    'revision': 2,
    'description': 'define messages for saying hello and bye',
    'dependencies': [],
    'scripts': [{
        'file': 'message.js',
        'sha256': sha256_hashes['message.js']
    }],
    'generated_by': expected_generated_by
}]

expected_mapping = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'type': 'mapping',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
    'version': [2021, 11, 10],
    'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
    'payloads': {
        'https://hydrillabugs.koszko.org/***': {
            'identifier': 'helloapple'
        },
        'https://hachettebugs.koszko.org/***': {
            'identifier': 'helloapple'
        }
    },
    'generated_by': expected_generated_by
}

expected_source_description = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'source_archives': {
        'zip': {
            'sha256': '!!!!value to fill during test!!!!',
        }
    },
    'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
    'definitions': [{
        'type': 'mapping',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }, {
        'type': 'resource',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }, {
        'type': 'resource',
        'identifier': 'hello-message',
        'long_name': 'Hello Message',
        'version': [2021, 11, 10],
    }],
    'generated_by': expected_generated_by
}

expected = [expected_mapping, *expected_resources, expected_source_description]
expected_items = expected[:3]
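# The item descriptions (the mapping and both resources), without the source
# description.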

def run_reuse(command, **kwargs):
    """
    Instead of running the 'reuse' command, check whether a file named
    'mock_reuse_missing' exists under the root directory. If it does, raise
    FileNotFoundError as if the 'reuse' command were missing. Otherwise,
    report a zero exit status only if 'README.txt.license' exists in the
    requested directory.
    """
    expected = ['reuse', '--root', '<root>',
                'lint' if 'lint' in command else 'spdx']

    root_path = Path(process_command(command, expected)['root'])

    if (root_path / 'mock_reuse_missing').exists():
        raise FileNotFoundError('dummy')

    is_reuse_compliant = (root_path / 'README.txt.license').exists()

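    # Mimic the real tool's exit status: 0 when REUSE-compliant, 1 otherwise.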
    return MockedCompletedProcess(command, 1 - is_reuse_compliant,
                                  stdout=f'dummy {expected[-1]} output',
                                  text_output=kwargs.get('text'))

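# Archive paths that the mocked local_apt.piggybacked_system() below pretends
# to provide.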
mocked_piggybacked_archives = [
    PurePosixPath('apt/something.deb'),
    PurePosixPath('apt/something.orig.tar.gz'),
    PurePosixPath('apt/something.debian.tar.xz'),
    PurePosixPath('othersystem/other-something.tar.gz')
]

@pytest.fixture
def mock_piggybacked_apt_system(monkeypatch):
    """Make local_apt.piggybacked_system() return a mocked result."""
    # We set 'td' to a temporary dir path further below.
    td = None

    class MockedPiggybacked:
        """Minimal mock of Piggybacked object."""
        package_license_files = [PurePosixPath('.apt-root/.../copyright')]
        resource_must_depend = [{'identifier': 'apt-common-licenses'}]

        def resolve_file(path):
            """
            For each path that starts with '.apt-root' return a valid dummy file
            path.
            """
            if path.parts[0] != '.apt-root':
                return None

            (td / path.name).write_text(f'dummy {path.name}')

            return (td / path.name)

        def archive_files():
            """Yield some valid dummy file path tuples."""
            for desired_path in mocked_piggybacked_archives:
                real_path = td / desired_path.name
                real_path.write_text(f'dummy {desired_path.name}')

                yield desired_path, real_path

    @contextmanager
    def mocked_piggybacked_system(piggyback_def, piggyback_files):
        """Mock the execution of local_apt.piggybacked_system()."""
        assert piggyback_def == {
            'system': 'apt',
            'distribution': 'nabia',
            'packages': ['somelib=1.0'],
            'dependencies': False
        }
        if piggyback_files is not None:
            assert {str(path) for path in mocked_piggybacked_archives} == \
                {path.relative_to(piggyback_files).as_posix()
                 for path in piggyback_files.rglob('*') if path.is_file()}

        yield MockedPiggybacked

    monkeypatch.setattr(local_apt, 'piggybacked_system',
                        mocked_piggybacked_system)

    with TemporaryDirectory() as td:
        td = Path(td)
        yield

@pytest.fixture
def sample_source():
    """Prepare a directory with a sample Haketilo source package."""
    with TemporaryDirectory() as td:
        sample_source = Path(td) / 'hello'
        for name, contents in src_files.items():
            path = sample_source / name
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_bytes(contents)

        yield sample_source

def collect(target_list):
    """Decorate a function by appending it to the specified list."""
    def decorator(function):
        """The actual decorator that will be applied."""
        target_list.append(function)
        return function

    return decorator

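# Each function below tweaks the sample package (and the expected build
# results) to produce a slightly different, still valid variant.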
variant_makers = []

@collect(variant_makers)
def sample_source_change_index_json(monkeypatch, sample_source):
    """
    Return a non-standard path for index.json. Ensure parent directories exist.
    """
    # Use a path under sample_source so that it gets auto-deleted after the
    # test. Use a file under .git because .git is ignored by REUSE.
    path = sample_source / '.git' / 'replacement.json'
    path.parent.mkdir()
    return path

@collect(variant_makers)
def sample_source_add_comments(monkeypatch, sample_source):
    """Add index.json comments that should be preserved."""
    for dictionary in (index_obj, *index_obj['definitions'], *expected):
        monkeypatch.setitem(dictionary, 'comment', 'index.json comment')

@collect(variant_makers)
def sample_source_remove_spdx(monkeypatch, sample_source):
    """Remove spdx report generation."""
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')

    pred = lambda ref: ref['file'] != 'report.spdx'
    copy_refs_in = list(filter(pred, index_obj['copyright']))
    monkeypatch.setitem(index_obj, 'copyright', copy_refs_in)

    copy_refs_out = list(filter(pred, expected_source_copyright))
    for obj in expected:
        monkeypatch.setitem(obj, 'source_copyright', copy_refs_out)

    monkeypatch.delitem(dist_files, 'report.spdx')

    # To verify that reuse does not get called now, make mocked subprocess.run()
    # raise an error if called.
    (sample_source / 'mock_reuse_missing').touch()

@collect(variant_makers)
def sample_source_remove_additional_files(monkeypatch, sample_source):
    """Use default value ([]) for 'additional_files' property."""
    monkeypatch.delitem(index_obj, 'additional_files')

    for name in 'README.txt', 'README.txt.license', '.reuse/dep5':
        monkeypatch.delitem(src_files, name)

@collect(variant_makers)
def sample_source_remove_script(monkeypatch, sample_source):
    """Use default value ([]) for 'scripts' property in one of the resources."""
    monkeypatch.delitem(index_obj['definitions'][2], 'scripts')

    monkeypatch.setitem(expected_resources[1], 'scripts', [])

    for files in dist_files, src_files:
        monkeypatch.delitem(files, 'message.js')

@collect(variant_makers)
def sample_source_remove_payloads(monkeypatch, sample_source):
    """Use default value ({}) for 'payloads' property in mapping."""
    monkeypatch.delitem(index_obj['definitions'][0], 'payloads')

    monkeypatch.setitem(expected_mapping, 'payloads', {})

@collect(variant_makers)
def sample_source_remove_uuids(monkeypatch, sample_source):
    """Don't use UUIDs (they are optional)."""
    for definition in index_obj['definitions']:
        monkeypatch.delitem(definition, 'uuid')

    for description in expected:
        if 'uuid' in description:
            monkeypatch.delitem(description, 'uuid')

@collect(variant_makers)
def sample_source_add_extra_props(monkeypatch, sample_source):
    """Add some unrecognized properties that should be stripped."""
    to_process = [index_obj]
    while to_process:
        processed = to_process.pop()

        if type(processed) is list:
            to_process.extend(processed)
        elif type(processed) is dict and 'spurious_property' not in processed:
            to_process.extend(v for k, v in processed.items()
                              if k != 'payloads')
            monkeypatch.setitem(processed, 'spurious_property', 'some_value')

@collect(variant_makers)
def sample_source_make_version_2(monkeypatch, sample_source,
                                 expected_documents_to_modify=[]):
    """Increase sources' schema version from 1 to 2."""
    for obj in (index_obj, *expected_documents_to_modify):
        monkeypatch.setitem(obj, '$schema', obj['$schema'].replace('1', '2'))

permission_variant_makers = []

@collect(permission_variant_makers)
def sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
                                    value=True):
    """
    Specify a boolean permission in sources, but keep sources' schema version
    at 1.
    """
    for definition in index_obj['definitions']:
        monkeypatch.setitem(definition, 'permissions', {permission: value})

@collect(permission_variant_makers)
def sample_source_bool_perm(permission, monkeypatch, sample_source):
    """Specify a boolean permission in sources."""
    sample_source_bool_perm_ignored(permission, monkeypatch, sample_source)
    sample_source_make_version_2(monkeypatch, sample_source, expected_items)

    for obj in expected_items:
        monkeypatch.setitem(obj, 'permissions', {permission: True})

@collect(permission_variant_makers)
def sample_source_bool_perm_defaults(permission, monkeypatch, sample_source):
    """
    Specify a boolean permission in sources but use the default value ("False").
    """
    sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
                                    value=False)
    sample_source_make_version_2(monkeypatch, sample_source)

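# Register each permission variant maker for both supported boolean
# permissions; ft.partial() binds the permission name as the first argument.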
for permission in 'cors_bypass', 'eval':
    for variant_maker in permission_variant_makers:
        variant_makers.append(ft.partial(variant_maker, permission))

@collect(variant_makers)
def sample_source_req_mappings_ignored(monkeypatch, sample_source,
                                       value=[{'identifier': 'mapping-dep'}]):
    """
    Specify dependencies on mappings, but keep sources' schema version at 1.
    """
    for definition in index_obj['definitions']:
        monkeypatch.setitem(definition, 'required_mappings', value)

@collect(variant_makers)
def sample_source_req_mappings(monkeypatch, sample_source):
    """Specify dependencies on mappings."""
    sample_source_req_mappings_ignored(monkeypatch, sample_source)
    sample_source_make_version_2(monkeypatch, sample_source, expected_items)

    for obj in expected_items:
        monkeypatch.setitem(obj, 'required_mappings',
                            [{'identifier': 'mapping-dep'}])

@collect(variant_makers)
def sample_source_req_mappings_defaults(monkeypatch, sample_source):
    """Specify dependencies of a mapping, but use the default value ("[]")."""
    sample_source_req_mappings_ignored(monkeypatch, sample_source, value=[])
    sample_source_make_version_2(monkeypatch, sample_source)

@collect(variant_makers)
def sample_source_combined_def(monkeypatch, sample_source):
    """Define mapping and resource together."""
    sample_source_make_version_2(monkeypatch, sample_source)

    mapping_def = index_obj['definitions'][0]
    resource_defs = index_obj['definitions'][1:3]

    item_defs_shortened = [mapping_def, resource_defs[1]]
    monkeypatch.setitem(index_obj, 'definitions', item_defs_shortened)

    monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')

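    # A combined definition is expected to produce a mapping whose version has
    # the revision appended as an extra component.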
    new_mapping_ver = [*expected_mapping['version'], 1]
    monkeypatch.setitem(mapping_def, 'revision', 1)
    monkeypatch.setitem(expected_mapping, 'version', new_mapping_ver)

    for prop in 'scripts', 'dependencies':
        monkeypatch.setitem(mapping_def, prop, resource_defs[0][prop])

    monkeypatch.setitem(expected_resources[0], 'uuid', mapping_def['uuid'])
    monkeypatch.setitem(expected_resources[0], 'description',
                        mapping_def['description'])

    monkeypatch.setitem(expected_source_description['definitions'][0],
                        'version', new_mapping_ver)

@collect(variant_makers)
def sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
                                              min_ver=[1, 2], max_ver=[1, 2]):
    """
    Specify version constraints on Haketilo, but keep sources' schema version at
    1.
    """
    mapping_def = index_obj['definitions'][0]
    monkeypatch.setitem(mapping_def, 'min_haketilo_version', min_ver)
    monkeypatch.setitem(mapping_def, 'max_haketilo_version', max_ver)

@collect(variant_makers)
def sample_source_minmax_haketilo_ver(monkeypatch, sample_source):
    """Specify version constraints on Haketilo."""
    sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source)
    sample_source_make_version_2(monkeypatch, sample_source, [expected_mapping])

    monkeypatch.setitem(expected_mapping, 'min_haketilo_version', [1, 2])
    monkeypatch.setitem(expected_mapping, 'max_haketilo_version', [1, 2])

@collect(variant_makers)
def sample_source_minmax_haketilo_ver_default(monkeypatch, sample_source):
    """Specify version constraints on Haketilo, but use default values."""
    sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
                                              min_ver=[1], max_ver=[65536])
    sample_source_make_version_2(monkeypatch, sample_source)

piggyback_archive_names = [
    'apt/something.deb',
    'apt/something.orig.tar.gz',
    'apt/something.debian.tar.xz',
    'othersystem/other-something.tar.gz'
]

@collect(variant_makers)
def sample_source_add_piggyback_ignored(monkeypatch, sample_source,
                                        extra_build_args={}):
    """
    Add piggybacked foreign system packages, but keep sources' schema version at
    1.
    """
    old_build = build.Build
    new_build = lambda *a, **kwa: old_build(*a, **kwa, **extra_build_args)
    monkeypatch.setattr(build, 'Build', new_build)

    monkeypatch.setitem(index_obj, 'piggyback_on', {
        'system': 'apt',
        'distribution': 'nabia',
        'packages': ['somelib=1.0'],
        'dependencies': False
    })

@collect(variant_makers)
def sample_source_add_piggyback(monkeypatch, sample_source,
                                extra_build_args={}):
    """Add piggybacked foreign system packages."""
    sample_source_add_piggyback_ignored\
        (monkeypatch, sample_source, extra_build_args)

    sample_source_make_version_2(monkeypatch, sample_source)

    new_refs = {}
    for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
        contents = f'dummy {PurePosixPath(name).name}'.encode()
        digest = sha256(contents).digest().hex()
        monkeypatch.setitem(dist_files, name, contents)
        monkeypatch.setitem(sha256_hashes, name, digest)
        new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}

    new_list = [*expected_source_copyright, new_refs['copyright']]
    for obj in expected:
        monkeypatch.setitem(obj, 'source_copyright', new_list)

    for obj in expected_resources:
        new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
        monkeypatch.setitem(obj, 'dependencies', new_list)

    for obj in index_obj['definitions'][1], expected_resources[0]:
        new_list = [new_refs['script.js'], *obj['scripts']]
        monkeypatch.setitem(obj, 'scripts', new_list)

    for name in piggyback_archive_names:
        path = PurePosixPath('hello.foreign-packages') / name
        monkeypatch.setitem(extra_archive_files, str(path),
                            f'dummy {path.name}'.encode())

def prepare_foreign_packages_dir(path):
    """
    Put some dummy archive in the directory so that it can be passed to
    piggybacked_system().
    """
    for name in piggyback_archive_names:
        archive_path = path / name
        archive_path.parent.mkdir(parents=True, exist_ok=True)
        archive_path.write_text(f'dummy {archive_path.name}')

@collect(variant_makers)
def sample_source_add_piggyback_pass_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages; pass the directory with
    pre-downloaded foreign package archives as an argument to Build().
    """
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    foreign_packages_dir = sample_source.parent / 'arbitrary-name'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source,
                                {'piggyback_files': foreign_packages_dir})

@collect(variant_makers)
def sample_source_add_piggyback_find_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages; let Build() find the
    pre-downloaded foreign package archives in their default directory.
    """
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source)

@collect(variant_makers)
def sample_source_add_piggyback_no_download(monkeypatch, sample_source,
                                            pass_directory_to_build=False):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives.
    """
    # Use a dir next to 'sample_source'; have it gc'd by sample_source fixture.
    if pass_directory_to_build:
        foreign_packages_dir = sample_source.parent / 'arbitrary-name'
    else:
        foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    # Pass the directory to Build() explicitly only when requested.
    sample_source_add_piggyback(monkeypatch, sample_source,
                                {'piggyback_files': foreign_packages_dir}
                                if pass_directory_to_build else {})

@pytest.fixture(params=[lambda m, s: None, *variant_makers])
def sample_source_make_variants(request, monkeypatch, sample_source,
                                mock_piggybacked_apt_system):
    """
    Prepare a directory with a sample Haketilo source package in multiple,
    slightly different versions (all correct). Return the index.json path that
    should be used when performing the test build.
    """
    index_path = request.param(monkeypatch, sample_source) or Path('index.json')

    index_text = json.dumps(index_obj)

    (sample_source / index_path).write_text(index_text)

    monkeypatch.setitem(src_files, 'index.json', index_text.encode())

    return index_path

def try_validate(as_what, instance):
    """
    Select the right JSON schema. Return without errors only if the instance
    validates against it.
    """
    major = _schema_name_re.search(instance['$schema']).group('major')
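    # Map the schema's major version to the exact schema version to validate
    # against.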
    exact_schema_version = {'1': '1.0.1', '2': '2'}[major]
    schema_filename = f'{as_what}-{exact_schema_version}.schema.json'
    hydrilla_util.validator_for(schema_filename).validate(instance)

@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build(sample_source, sample_source_make_variants, tmpdir):
    """Build the sample source package and verify the produced files."""
    index_json_path = sample_source_make_variants
    # Use the pathlib API uniformly ('tmpdir' is a py.path.local object).
    tmpdir = Path(tmpdir)

    # First, build the package
    build.Build(sample_source, index_json_path).write_package_files(tmpdir)

    # Verify directories under destination directory
    assert {'file', 'resource', 'mapping', 'source'} == \
        {path.name for path in tmpdir.iterdir()}

    # Verify files under 'file/'
    file_dir = tmpdir / 'file' / 'sha256'

    for name, contents in dist_files.items():
        dist_file_path = file_dir / sha256_hashes[name]
        assert dist_file_path.is_file()
        assert dist_file_path.read_bytes() == contents

    assert {p.name for p in file_dir.iterdir()} == \
        {sha256_hashes[name] for name in dist_files.keys()}

    # Verify files under 'resource/'
    resource_dir = tmpdir / 'resource'

    assert {rj['identifier'] for rj in expected_resources} == \
        {path.name for path in resource_dir.iterdir()}

    for resource_json in expected_resources:
        subdir = resource_dir / resource_json['identifier']
        ver_str = hydrilla_util.version_string(resource_json['version'])
        assert [ver_str] == [path.name for path in subdir.iterdir()]

        assert json.loads((subdir / ver_str).read_text()) == resource_json

        try_validate('api_resource_description', resource_json)

    # Verify files under 'mapping/'
    mapping_dir = tmpdir / 'mapping'
    assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]

    subdir = mapping_dir / 'helloapple'

    ver_str = hydrilla_util.version_string(expected_mapping['version'])
    assert [ver_str] == [path.name for path in subdir.iterdir()]

    assert json.loads((subdir / ver_str).read_text()) == expected_mapping

    try_validate('api_mapping_description', expected_mapping)

    # Verify files under 'source/'
    source_dir = tmpdir / 'source'
    assert {'hello.json', 'hello.zip'} == \
        {path.name for path in source_dir.iterdir()}

    archive_files = {**dict((f'hello/{name}', contents)
                            for name, contents in src_files.items()),
                     **extra_archive_files}

    with ZipFile(source_dir / 'hello.zip', 'r') as archive:
        print(archive.namelist())
        assert len(archive.namelist()) == len(archive_files)

        for name, contents in archive_files.items():
            assert archive.read(name) == contents

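    # Compute the archive's checksum and fill in the placeholder left in
    # expected_source_description.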
    zip_ref = expected_source_description['source_archives']['zip']
    zip_contents = (source_dir / 'hello.zip').read_bytes()
    zip_ref['sha256'] = sha256(zip_contents).digest().hex()

    assert json.loads((source_dir / 'hello.json').read_text()) == \
        expected_source_description

    try_validate('api_source_description', expected_source_description)

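# Each function below breaks the sample package in one way and returns the
# error type (and, optionally, a message regex and replacement index.json
# text) that the build is then expected to produce.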
error_makers = []

@collect(error_makers)
def sample_source_error_missing_file(monkeypatch, sample_source):
    """
    Modify index.json to expect missing report.spdx file and cause an error.
    """
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
    return FileReferenceError, '^referenced_file_report.spdx_missing$'

@collect(error_makers)
def sample_source_error_index_schema(monkeypatch, sample_source):
    """Modify index.json to be incompliant with the schema."""
    monkeypatch.delitem(index_obj, 'definitions')
    return ValidationError,

@collect(error_makers)
def sample_source_error_unknown_index_schema(monkeypatch, sample_source):
    """Modify index.json to use a not-yet-released schema."""
    schema_id = \
        'https://hydrilla.koszko.org/schemas/package_source-65536.schema.json'
    monkeypatch.setitem(index_obj, "$schema", schema_id)
    return hydrilla_util.UnknownSchemaError, \
        r'^unknown_schema_package_source_.*/hello/index\.json$'

@collect(error_makers)
def sample_source_error_bad_comment(monkeypatch, sample_source):
    """Modify index.json to have an invalid '/' in it."""
    return json.JSONDecodeError, '^bad_comment: .*', \
        json.dumps(index_obj) + '/something\n'

@collect(error_makers)
def sample_source_error_bad_json(monkeypatch, sample_source):
    """Modify index.json to not be valid json even after comment stripping."""
    return json.JSONDecodeError, '', json.dumps(index_obj) + '???\n'

@collect(error_makers)
def sample_source_error_missing_reuse(monkeypatch, sample_source):
    """Cause mocked reuse process invocation to fail with FileNotFoundError."""
    (sample_source / 'mock_reuse_missing').touch()
    return build.ReuseError, '^couldnt_execute_reuse_is_it_installed$'

@collect(error_makers)
def sample_source_error_missing_license(monkeypatch, sample_source):
    """Remove a file to make package REUSE-incompliant."""
    (sample_source / 'README.txt.license').unlink()

    error_regex = """^\
command_reuse --root \\S+ lint_failed

STDOUT_OUTPUT_heading

dummy lint output

STDERR_OUTPUT_heading

some error output\
$\
"""

    return build.ReuseError, error_regex

@collect(error_makers)
def sample_source_error_file_outside(monkeypatch, sample_source):
    """Make index.json illegally reference a file outside srcdir."""
    new_list = [*index_obj['copyright'], {'file': '../abc'}]
    monkeypatch.setitem(index_obj, 'copyright', new_list)
    return FileReferenceError, '^path_contains_double_dot_\\.\\./abc$'

@collect(error_makers)
def sample_source_error_reference_itself(monkeypatch, sample_source):
    """Make index.json illegally reference index.json."""
    new_list = [*index_obj['copyright'], {'file': 'index.json'}]
    monkeypatch.setitem(index_obj, 'copyright', new_list)
    return FileReferenceError, '^loading_reserved_index_json$'

@collect(error_makers)
def sample_source_error_report_excluded(monkeypatch, sample_source):
    """
    Make index.json require generation of report.spdx but don't include it among
    copyright files.
    """
    new_list = [file_ref for file_ref in index_obj['copyright']
                if file_ref['file'] != 'report.spdx']
    monkeypatch.setitem(index_obj, 'copyright', new_list)
    return FileReferenceError, '^report_spdx_not_in_copyright_list$'

@collect(error_makers)
def sample_source_error_combined_unsupported(monkeypatch, sample_source):
    """
    Define mapping and resource together but leave source schema version at 1.x
    where this is unsupported.
    """
    mapping_def = index_obj['definitions'][0]
    monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')

    return ValidationError,

@pytest.fixture(params=error_makers)
def sample_source_make_errors(request, monkeypatch, sample_source):
    """
    Prepare a directory with a sample Haketilo source package in multiple,
    slightly broken versions. Return the error type that should be raised when
    running the test build.
    """
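    # Pad the maker's return value so that error_regex and index_text default
    # to ''.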
    error_type, error_regex, index_text = \
        [*request.param(monkeypatch, sample_source), '', ''][0:3]

    index_text = index_text or json.dumps(index_obj)

    (sample_source / 'index.json').write_text(index_text)

    monkeypatch.setitem(src_files, 'index.json', index_text.encode())

    return error_type, error_regex

@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build_error(tmpdir, sample_source, sample_source_make_errors):
    """Try building the sample source package and verify generated errors."""
    error_type, error_regex = sample_source_make_errors

    dstdir = Path(tmpdir) / 'dstdir'
    dstdir.mkdir(exist_ok=True)

    with pytest.raises(error_type, match=error_regex):
        build.Build(sample_source, Path('index.json'))\
            .write_package_files(dstdir)