Project

General

Profile

Download (23.4 KB) Statistics
| Branch: | Tag: | Revision:

hydrilla-builder / tests / test_build.py @ 61f0aa75

1
# SPDX-License-Identifier: CC0-1.0
2

    
3
# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
4
#
5
# Available under the terms of Creative Commons Zero v1.0 Universal.
6

    
7
# Enable using with Python 3.7.
8
from __future__ import annotations
9

    
10
import pytest
11
import json
12
import shutil
13

    
14
from tempfile import TemporaryDirectory
15
from pathlib import Path, PurePosixPath
16
from hashlib import sha256
17
from zipfile import ZipFile
18
from contextlib import contextmanager
19

    
20
from jsonschema import ValidationError
21

    
22
from hydrilla import util as hydrilla_util
23
from hydrilla.builder import build, _version, local_apt
24
from hydrilla.builder.common_errors import *
25

    
26
from .helpers import *
27

    
28
# Absolute path of the directory containing this test file.
here = Path(__file__).resolve().parent

# 'generated_by' object the builder is expected to embed in every produced
# JSON document.
expected_generated_by = {
    'name': 'hydrilla.builder',
    'version': _version.version
}

# Pristine sample source package shipped alongside the tests.
orig_srcdir = here / 'source-package-example'

# Raw index.json text and its parsed (comment-stripped) JSON object; the
# variant/error makers below mutate copies of this via monkeypatch.
index_text = (orig_srcdir / 'index.json').read_text()
index_obj = json.loads(hydrilla_util.strip_json_comments(index_text))
39

    
40
def read_files(*file_list):
    """
    Take names of files under srcdir and return a dict that maps them to their
    contents (as bytes).
    """
    return {name: (orig_srcdir / name).read_bytes() for name in file_list}
46

    
47
# Files expected to land in the built package's 'file/sha256/' directory.
dist_files = {
    **read_files('LICENSES/CC0-1.0.txt', 'bye.js', 'hello.js', 'message.js'),
    'report.spdx': b'dummy spdx output'
}
# All files making up the sample source package (dist files plus metadata).
src_files = {
    **dist_files,
    **read_files('README.txt', 'README.txt.license', '.reuse/dep5',
                 'index.json')
}
# Extra entries expected inside the source archive; populated by the
# piggyback variant makers below.
extra_archive_files = {
}

# SHA-256 hex digest of each source file, keyed by file name.
sha256_hashes = dict((name, sha256(contents).digest().hex())
                     for name, contents in src_files.items())

# report.spdx is generated during the build, so it is not part of the
# source tree itself.
del src_files['report.spdx']
63

    
64
# Resource descriptions the builder is expected to generate from the sample
# package's two resource definitions ('helloapple' and 'hello-message').
expected_resources = [{
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'type': 'resource',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
    'version': [2021, 11, 10],
    'revision': 1,
    'description': 'greets an apple',
    'dependencies': [{'identifier': 'hello-message'}],
    'scripts': [{
        'file': 'hello.js',
        'sha256': sha256_hashes['hello.js']
    }, {
        'file': 'bye.js',
        'sha256': sha256_hashes['bye.js']
    }],
    'generated_by': expected_generated_by
}, {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'type': 'resource',
    'identifier': 'hello-message',
    'long_name': 'Hello Message',
    'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
    'version': [2021, 11, 10],
    'revision': 2,
    'description': 'define messages for saying hello and bye',
    'dependencies': [],
    'scripts': [{
        'file': 'message.js',
        'sha256': sha256_hashes['message.js']
    }],
    'generated_by': expected_generated_by
}]
114

    
115
# Mapping description the builder is expected to generate from the sample
# package's single mapping definition.
expected_mapping = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'type': 'mapping',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
    'version': [2021, 11, 10],
    'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
    'payloads': {
        'https://hydrillabugs.koszko.org/***': {
            'identifier': 'helloapple'
        },
        'https://hachettebugs.koszko.org/***': {
            'identifier': 'helloapple'
        }
    },
    'generated_by': expected_generated_by
}
141

    
142
# Source description the builder is expected to generate; the zip archive's
# hash is only known after the build, so it is filled in by test_build().
expected_source_description = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'source_archives': {
        'zip': {
            'sha256': '!!!!value to fill during test!!!!',
        }
    },
    'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
    'definitions': [{
        'type': 'resource',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }, {
        'type':       'resource',
        'identifier': 'hello-message',
        'long_name': 'Hello Message',
        'version':     [2021, 11, 10],
    }, {
        'type': 'mapping',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }],
    'generated_by': expected_generated_by
}

# All expected JSON documents, in the order variant makers iterate them.
expected = [*expected_resources, expected_mapping, expected_source_description]
178

    
179
@pytest.fixture
def tmpdir() -> Iterable[Path]:
    """
    Provide test case with a temporary directory that will be automatically
    deleted after the test.
    """
    # The fixture yields a Path (the original annotation said 'str');
    # 'Iterable' resolves lazily thanks to 'from __future__ import annotations'.
    with TemporaryDirectory() as tmpdir:
        yield Path(tmpdir)
187

    
188
def run_reuse(command, **kwargs):
    """
    Instead of running a 'reuse' command, check if 'mock_reuse_missing' file
    exists under root directory. If yes, raise FileNotFoundError as if 'reuse'
    command was missing. If not, check if 'README.txt.license' file exists
    in the requested directory and return zero if it does.
    """
    subcommand = 'lint' if 'lint' in command else 'spdx'
    expected = ['reuse', '--root', '<root>', subcommand]

    root_path = Path(process_command(command, expected)['root'])

    # Marker file simulates the 'reuse' tool being absent from the system.
    if (root_path / 'mock_reuse_missing').exists():
        raise FileNotFoundError('dummy')

    exit_status = 0 if (root_path / 'README.txt.license').exists() else 1

    return MockedCompletedProcess(command, exit_status,
                                  stdout=f'dummy {subcommand} output',
                                  text_output=kwargs.get('text'))
208

    
209
# Relative paths of the foreign package archives the mocked Piggybacked
# object pretends to provide (mirrors piggyback_archive_names below).
mocked_piggybacked_archives = [
    PurePosixPath('apt/something.deb'),
    PurePosixPath('apt/something.orig.tar.gz'),
    PurePosixPath('apt/something.debian.tar.xz'),
    PurePosixPath('othersystem/other-something.tar.gz')
]
215

    
216
@pytest.fixture
def mock_piggybacked_apt_system(monkeypatch):
    """Make local_apt.piggybacked_system() return a mocked result."""
    # We set 'td' to a temporary dir path further below; the closures inside
    # MockedPiggybacked read it through this enclosing scope.
    td = None

    class MockedPiggybacked:
        """Minimal mock of Piggybacked object."""
        # License file paths reported for the piggybacked packages.
        package_license_files = [PurePosixPath('.apt-root/.../copyright')]
        # Dependency every resource must gain when piggybacking on apt.
        package_must_depend = [{'identifier': 'apt-common-licenses'}]

        # NOTE(review): resolve_file/archive_files take no 'self' - they are
        # used as plain function attributes of the class, not via instances.
        def resolve_file(path):
            """
            For each path that starts with '.apt-root' return a valid
            dummy file path.
            """
            if path.parts[0] != '.apt-root':
                return None

            (td / path.name).write_text(f'dummy {path.name}')

            return (td / path.name)

        def archive_files():
            """Yield some valid dummy file path tuples."""
            for desired_path in mocked_piggybacked_archives:
                real_path = td / desired_path.name
                real_path.write_text(f'dummy {desired_path.name}')

                yield desired_path, real_path

    @contextmanager
    def mocked_piggybacked_system(piggyback_def, piggyback_files):
        """Mock the execution of local_apt.piggybacked_system()."""
        # Only the sample package's piggybacking definition is expected here.
        assert piggyback_def == {
            'system': 'apt',
            'distribution': 'nabia',
            'packages': ['somelib=1.0'],
            'dependencies': False
        }
        if piggyback_files is not None:
            # Verify the pre-downloaded archives were laid out as expected.
            assert {str(path) for path in mocked_piggybacked_archives} == \
                {path.relative_to(piggyback_files).as_posix()
                 for path in piggyback_files.rglob('*') if path.is_file()}

        yield MockedPiggybacked

    monkeypatch.setattr(local_apt, 'piggybacked_system',
                        mocked_piggybacked_system)

    # Rebind 'td' so the closures above see the real temporary directory.
    with TemporaryDirectory() as td:
        td = Path(td)
        yield
269

    
270
@pytest.fixture
def sample_source():
    """Prepare a directory with sample Haketilo source package."""
    with TemporaryDirectory() as tmpdir:
        srcdir = Path(tmpdir) / 'hello'
        # Materialize every source file, creating parent dirs as needed.
        for file_name, file_contents in src_files.items():
            file_path = srcdir / file_name
            file_path.parent.mkdir(parents=True, exist_ok=True)
            file_path.write_bytes(file_contents)

        yield srcdir
281

    
282
# Registry of functions that each produce a slightly different valid variant
# of the sample package.
variant_makers = []

def variant_maker(function):
    """Register the decorated function in variant_makers and return it."""
    variant_makers.append(function)
    return function
287

    
288
@variant_maker
def sample_source_change_index_json(monkeypatch, sample_source):
    """
    Return a non-standard path for index.json. Ensure parent directories exist.
    """
    # Use a path under sample_source so that it gets auto-deleted after the
    # test. Use a file under .git because .git is ignored by REUSE.
    git_dir = sample_source / '.git'
    git_dir.mkdir()
    return git_dir / 'replacement.json'
298

    
299
@variant_maker
def sample_source_add_comments(monkeypatch, sample_source):
    """Add index.json comments that should be preserved."""
    for dictionary in (index_obj, expected_source_description):
        monkeypatch.setitem(dictionary, 'comment', 'index.json comment')

    # Comment each definition together with its corresponding expected
    # description; the enumerate() index was unused, plain zip() suffices.
    for dicts in zip(index_obj['definitions'], expected):
        for dictionary in dicts:
            monkeypatch.setitem(dictionary, 'comment', 'index.json comment')
308

    
309
@variant_maker
def sample_source_remove_spdx(monkeypatch, sample_source):
    """Remove spdx report generation."""
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')

    # Strip the report.spdx reference from the index's copyright list and
    # from every expected document's source_copyright list.
    targets = [(index_obj, 'copyright')]
    targets.extend((definition, 'source_copyright') for definition in expected)

    for obj, key in targets:
        filtered = [ref for ref in obj[key] if ref['file'] != 'report.spdx']
        monkeypatch.setitem(obj, key, filtered)

    monkeypatch.delitem(dist_files, 'report.spdx')

    # To verify that reuse does not get called now, make mocked subprocess.run()
    # raise an error if called.
    (sample_source / 'mock_reuse_missing').touch()
326

    
327
@variant_maker
def sample_source_remove_additional_files(monkeypatch, sample_source):
    """Use default value ([]) for 'additional_files' property."""
    monkeypatch.delitem(index_obj, 'additional_files')

    # Those files were only listed via 'additional_files'.
    for name in ('README.txt', 'README.txt.license', '.reuse/dep5'):
        monkeypatch.delitem(src_files, name)
334

    
335
@variant_maker
def sample_source_remove_script(monkeypatch, sample_source):
    """Use default value ([]) for 'scripts' property in one of the resources."""
    monkeypatch.delitem(index_obj['definitions'][1], 'scripts')
    monkeypatch.setitem(expected_resources[1], 'scripts', [])

    # message.js was that resource's only script, so it leaves the package.
    for file_dict in (dist_files, src_files):
        monkeypatch.delitem(file_dict, 'message.js')
344

    
345
@variant_maker
def sample_source_remove_payloads(monkeypatch, sample_source):
    """Use default value ({}) for 'payloads' property in mapping."""
    mapping_definition = index_obj['definitions'][2]
    monkeypatch.delitem(mapping_definition, 'payloads')

    monkeypatch.setitem(expected_mapping, 'payloads', {})
351

    
352
@variant_maker
def sample_source_remove_uuids(monkeypatch, sample_source):
    """Don't use UUIDs (they are optional)."""
    for definition in index_obj['definitions']:
        monkeypatch.delitem(definition, 'uuid')

    # The source description carries no uuid, hence the membership check.
    for description in expected:
        if 'uuid' in description:
            monkeypatch.delitem(description, 'uuid')
361

    
362
@variant_maker
def sample_source_add_extra_props(monkeypatch, sample_source):
    """Add some unrecognized properties that should be stripped."""
    # Walk the whole index.json tree iteratively with an explicit stack.
    pending = [index_obj]
    while pending:
        current = pending.pop()

        if type(current) is list:
            pending.extend(current)
        elif type(current) is dict and 'spurious_property' not in current:
            # Don't descend into 'payloads' values.
            pending.extend(value for key, value in current.items()
                           if key != 'payloads')
            monkeypatch.setitem(current, 'spurious_property', 'some_value')
375

    
376
# Relative paths of the foreign package archives used by the piggyback test
# variants (kept in sync with mocked_piggybacked_archives above).
piggyback_archive_names = [
    'apt/something.deb',
    'apt/something.orig.tar.gz',
    'apt/something.debian.tar.xz',
    'othersystem/other-something.tar.gz'
]
382

    
383
@variant_maker
def sample_source_add_piggyback(monkeypatch, sample_source,
                                extra_build_args=None):
    """Add piggybacked foreign system packages."""
    # Avoid the mutable-default-argument pitfall; None stands for 'no extras'.
    extra_build_args = {} if extra_build_args is None else extra_build_args

    # Wrap build.Build so the extra keyword arguments get forwarded to it.
    old_build = build.Build
    new_build = lambda *a, **kwa: old_build(*a, **kwa, **extra_build_args)
    monkeypatch.setattr(build, 'Build', new_build)

    monkeypatch.setitem(index_obj, 'piggyback_on', {
        'system': 'apt',
        'distribution': 'nabia',
        'packages': ['somelib=1.0'],
        'dependencies': False
    })
    # Piggybacking requires the version 2 source package schema.
    schema = 'https://hydrilla.koszko.org/schemas/package_source-2.schema.json'
    monkeypatch.setitem(index_obj, '$schema', schema)

    # Register the dummy files the mocked piggybacked system provides.
    new_refs = {}
    for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
        contents = f'dummy {PurePosixPath(name).name}'.encode()
        digest = sha256(contents).digest().hex()
        monkeypatch.setitem(dist_files, name, contents)
        monkeypatch.setitem(sha256_hashes, name, digest)
        new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}

    # Every produced document gains the extra copyright file...
    for obj in expected:
        new_list = [*obj['source_copyright'], new_refs['copyright']]
        monkeypatch.setitem(obj, 'source_copyright', new_list)

    # ...every resource gains the mandatory apt dependency...
    for obj in expected_resources:
        new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
        monkeypatch.setitem(obj, 'dependencies', new_list)

    # ...and the first resource gains the piggybacked script.
    for obj in index_obj['definitions'][0], expected_resources[0]:
        new_list = [new_refs['script.js'], *obj['scripts']]
        monkeypatch.setitem(obj, 'scripts', new_list)

    # The foreign archives are expected inside the produced source archive.
    for name in piggyback_archive_names:
        path = PurePosixPath('hello.foreign-packages') / name
        monkeypatch.setitem(extra_archive_files, str(path),
                            f'dummy {path.name}'.encode())
424

    
425
def prepare_foreign_packages_dir(path):
    """
    Put some dummy archive in the directory so that it can be passed to
    piggybacked_system().
    """
    for archive_name in piggyback_archive_names:
        target = path / archive_name
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(f'dummy {target.name}')
434

    
435
@variant_maker
def sample_source_add_piggyback_pass_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives (specify their directory as argument to Build()).
    """
    # Docstring fixed: this variant passes the directory explicitly via the
    # 'piggyback_files' argument (it was swapped with the find_archives one).
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    foreign_packages_dir = sample_source.parent / 'arbitrary-name'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source,
                                {'piggyback_files': foreign_packages_dir})
448

    
449
@variant_maker
def sample_source_add_piggyback_find_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives (have Build() find them in their default directory).
    """
    # Docstring fixed: no directory is passed to Build() here - it is
    # expected to look in the default '<name>.foreign-packages' location.
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source)
461

    
462
@variant_maker
def sample_source_add_piggyback_no_download(monkeypatch, sample_source,
                                            pass_directory_to_build=False):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives.
    """
    # Use a dir next to 'sample_source'; have it gc'd by sample_source fixture.
    # NOTE(review): when pass_directory_to_build is True, the chosen
    # 'arbitrary-name' directory is never forwarded to Build() below
    # (compare sample_source_add_piggyback_pass_archives) - confirm whether
    # that branch is intentional; the suite only uses the default False.
    if pass_directory_to_build:
        foreign_packages_dir = sample_source.parent / 'arbitrary-name'
    else:
        foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source)
478

    
479
@pytest.fixture(params=[lambda m, s: None, *variant_makers])
def sample_source_make_variants(request, monkeypatch, sample_source,
                                mock_piggybacked_apt_system):
    """
    Prepare a directory with sample Haketilo source package in multiple slightly
    different versions (all correct). Return an index.json path that should be
    used when performing test build.
    """
    # A variant maker may return a custom index.json location; fall back to
    # the conventional one otherwise.
    custom_path = request.param(monkeypatch, sample_source)
    index_path = custom_path or Path('index.json')

    serialized = json.dumps(index_obj)
    (sample_source / index_path).write_text(serialized)

    monkeypatch.setitem(src_files, 'index.json', serialized.encode())

    return index_path
496

    
497
@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build(sample_source, sample_source_make_variants, tmpdir):
    """Build the sample source package and verify the produced files."""
    index_json_path = sample_source_make_variants

    # First, build the package
    build.Build(sample_source, index_json_path).write_package_files(tmpdir)

    # Verify directories under destination directory
    assert {'file', 'resource', 'mapping', 'source'} == \
        {path.name for path in tmpdir.iterdir()}

    # Verify files under 'file/'
    file_dir = tmpdir / 'file' / 'sha256'

    for name, contents in dist_files.items():
        dist_file_path = file_dir / sha256_hashes[name]
        assert dist_file_path.is_file()
        assert dist_file_path.read_bytes() == contents

    # No files other than the expected ones should have been produced.
    assert {p.name for p in file_dir.iterdir()} == \
        {sha256_hashes[name] for name in dist_files.keys()}

    # Verify files under 'resource/'
    resource_dir = tmpdir / 'resource'

    assert {rj['identifier'] for rj in expected_resources} == \
        {path.name for path in resource_dir.iterdir()}

    for resource_json in expected_resources:
        subdir = resource_dir / resource_json['identifier']
        assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]

        assert json.loads((subdir / '2021.11.10').read_text()) == resource_json

        hydrilla_util.validator_for('api_resource_description-1.0.1.schema.json')\
                     .validate(resource_json)

    # Verify files under 'mapping/'
    mapping_dir = tmpdir / 'mapping'
    assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]

    subdir = mapping_dir / 'helloapple'
    assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]

    assert json.loads((subdir / '2021.11.10').read_text()) == expected_mapping

    hydrilla_util.validator_for('api_mapping_description-1.0.1.schema.json')\
                 .validate(expected_mapping)

    # Verify files under 'source/'
    source_dir = tmpdir / 'source'
    assert {'hello.json', 'hello.zip'} == \
        {path.name for path in source_dir.iterdir()}

    archive_files = {**dict((f'hello/{name}', contents)
                            for name, contents in src_files.items()),
                     **extra_archive_files}

    with ZipFile(source_dir / 'hello.zip', 'r') as archive:
        # (leftover debug print() removed; the assertions below suffice)
        assert len(archive.namelist()) == len(archive_files)

        for name, contents in archive_files.items():
            assert archive.read(name) == contents

    # The archive's hash is only known once it's built - fill it in now so
    # the produced hello.json can be compared against the expectation.
    zip_ref = expected_source_description['source_archives']['zip']
    zip_contents = (source_dir / 'hello.zip').read_bytes()
    zip_ref['sha256'] = sha256(zip_contents).digest().hex()

    assert json.loads((source_dir / 'hello.json').read_text()) == \
        expected_source_description

    hydrilla_util.validator_for('api_source_description-1.0.1.schema.json')\
                 .validate(expected_source_description)
573

    
574
# Registry of functions that each break the sample package in one way.
error_makers = []

def error_maker(function):
    """Decorate function by placing it in error_makers array."""
    error_makers.append(function)
    # Return the function so decorated module-level names don't become None
    # (consistent with variant_maker() above).
    return function
578

    
579
@error_maker
def sample_source_error_missing_file(monkeypatch, sample_source):
    """
    Modify index.json to expect missing report.spdx file and cause an error.
    """
    # Without report generation the referenced report.spdx never appears.
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')

    return FileNotFoundError
586

    
587
@error_maker
def sample_source_error_index_schema(monkeypatch, sample_source):
    """Modify index.json to be incompliant with the schema."""
    # Dropping 'definitions' makes schema validation fail.
    monkeypatch.delitem(index_obj, 'definitions')

    return ValidationError
592

    
593
@error_maker
def sample_source_error_bad_comment(monkeypatch, sample_source):
    """Modify index.json to have an invalid '/' in it."""
    invalid_text = json.dumps(index_obj) + '/something\n'
    return json.JSONDecodeError, invalid_text
597

    
598
@error_maker
def sample_source_error_bad_json(monkeypatch, sample_source):
    """Modify index.json to not be valid json even after comment stripping."""
    invalid_text = json.dumps(index_obj) + '???/\n'
    return json.JSONDecodeError, invalid_text
602

    
603
@error_maker
def sample_source_error_missing_reuse(monkeypatch, sample_source):
    """Cause mocked reuse process invocation to fail with FileNotFoundError."""
    # run_reuse() raises FileNotFoundError when it sees this marker file.
    (sample_source / 'mock_reuse_missing').touch()

    return build.ReuseError
608

    
609
@error_maker
def sample_source_error_missing_license(monkeypatch, sample_source):
    """Remove a file to make package REUSE-incompliant."""
    # run_reuse() reports a nonzero status when README.txt.license is absent.
    (sample_source / 'README.txt.license').unlink()

    return build.ReuseError
614

    
615
@error_maker
def sample_source_error_file_outside(monkeypatch, sample_source):
    """Make index.json illegally reference a file outside srcdir."""
    bad_copyright = [*index_obj['copyright'], {'file': '../abc'}]
    monkeypatch.setitem(index_obj, 'copyright', bad_copyright)

    return FileReferenceError
621

    
622
@error_maker
def sample_source_error_reference_itself(monkeypatch, sample_source):
    """Make index.json illegally reference index.json."""
    bad_copyright = [*index_obj['copyright'], {'file': 'index.json'}]
    monkeypatch.setitem(index_obj, 'copyright', bad_copyright)

    return FileReferenceError
628

    
629
@error_maker
def sample_source_error_report_excluded(monkeypatch, sample_source):
    """
    Make index.json require generation of report.spdx but don't include it among
    copyright files.
    """
    without_report = [file_ref for file_ref in index_obj['copyright']
                      if file_ref['file'] != 'report.spdx']
    monkeypatch.setitem(index_obj, 'copyright', without_report)

    return FileReferenceError
639

    
640
@pytest.fixture(params=error_makers)
def sample_source_make_errors(request, monkeypatch, sample_source):
    """
    Prepare a directory with sample Haketilo source package in multiple slightly
    broken versions. Return an error type that should be raised when running
    test build.
    """
    # Error makers either return just the error type or a pair of the type
    # and replacement index.json text.
    index_text = None
    result = request.param(monkeypatch, sample_source)
    if type(result) is tuple:
        error_type, index_text = result
    else:
        error_type = result

    index_text = index_text or json.dumps(index_obj)

    (sample_source / 'index.json').write_text(index_text)
    monkeypatch.setitem(src_files, 'index.json', index_text.encode())

    return error_type
659

    
660
@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build_error(tmpdir, sample_source, sample_source_make_errors):
    """Try building the sample source package and verify generated errors."""
    error_type = sample_source_make_errors

    # Keep build scratch space and destination separate.
    dstdir = Path(tmpdir) / 'dstdir'
    tmpdir = Path(tmpdir) / 'example'
    for directory in (dstdir, tmpdir):
        directory.mkdir(exist_ok=True)

    with pytest.raises(error_type):
        build.Build(sample_source, Path('index.json'))\
             .write_package_files(dstdir)
(4-4/5)