Project

General

Profile

Download (23.5 KB) Statistics
| Branch: | Tag: | Revision:

hydrilla-builder / tests / test_build.py @ f42f5c19

1
# SPDX-License-Identifier: CC0-1.0
2

    
3
# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
4
#
5
# Available under the terms of Creative Commons Zero v1.0 Universal.
6

    
7
# Enable using with Python 3.7.
8
from __future__ import annotations
9

    
10
import pytest
11
import json
12
import shutil
13

    
14
from tempfile import TemporaryDirectory
15
from pathlib import Path, PurePosixPath
16
from hashlib import sha256
17
from zipfile import ZipFile
18
from contextlib import contextmanager
19

    
20
from jsonschema import ValidationError
21

    
22
from hydrilla import util as hydrilla_util
23
from hydrilla.builder import build, _version, local_apt
24
from hydrilla.builder.common_errors import *
25

    
26
from .helpers import *
27

    
28
# Directory containing this test module; used to locate test data.
here = Path(__file__).resolve().parent

# 'generated_by' object the builder is expected to embed in every JSON
# document it produces.
expected_generated_by = {
    'name': 'hydrilla.builder',
    'version': _version.version
}

# Pristine sample source package; tests copy its files and mutate the copies.
orig_srcdir = here / 'source-package-example'

# Parsed index.json of the sample package (second returned value is unused
# here).
index_obj, _ = hydrilla_util.load_instance_from_file(orig_srcdir / 'index.json')
38

    
39
def read_files(*file_list):
    """
    Take names of files under srcdir and return a dict that maps them to their
    contents (as bytes).
    """
    return {name: (orig_srcdir / name).read_bytes() for name in file_list}
45

    
46
# Files expected to appear in the built package (under 'file/sha256/').
dist_files = {
    **read_files('LICENSES/CC0-1.0.txt', 'bye.js', 'hello.js', 'message.js'),
    'report.spdx': b'dummy spdx output'
}
# Files making up the source package (a superset of dist_files).
src_files = {
    **dist_files,
    **read_files('README.txt', 'README.txt.license', '.reuse/dep5',
                 'index.json')
}
# Extra entries expected inside the produced source ZIP archive; variant
# makers may populate this.
extra_archive_files = {
}

# Hex-encoded SHA-256 digests of all the above files, keyed by file name.
sha256_hashes = dict((name, sha256(contents).digest().hex())
                     for name, contents in src_files.items())

# report.spdx is generated during the build, so it is not a source file.
del src_files['report.spdx']
62

    
63
# Resource description documents the builder is expected to produce from the
# two 'resource' definitions in the sample package's index.json.
expected_resources = [{
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'type': 'resource',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
    'version': [2021, 11, 10],
    'revision': 1,
    'description': 'greets an apple',
    'dependencies': [{'identifier': 'hello-message'}],
    'scripts': [{
        'file': 'hello.js',
        'sha256': sha256_hashes['hello.js']
    }, {
        'file': 'bye.js',
        'sha256': sha256_hashes['bye.js']
    }],
    'generated_by': expected_generated_by
}, {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'type': 'resource',
    'identifier': 'hello-message',
    'long_name': 'Hello Message',
    'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
    'version': [2021, 11, 10],
    'revision': 2,
    'description': 'define messages for saying hello and bye',
    'dependencies': [],
    'scripts': [{
        'file': 'message.js',
        'sha256': sha256_hashes['message.js']
    }],
    'generated_by': expected_generated_by
}]
113

    
114
# Mapping description document the builder is expected to produce from the
# 'mapping' definition in the sample package's index.json.
expected_mapping = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'type': 'mapping',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
    'version': [2021, 11, 10],
    'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
    'payloads': {
        'https://hydrillabugs.koszko.org/***': {
            'identifier': 'helloapple'
        },
        'https://hachettebugs.koszko.org/***': {
            'identifier': 'helloapple'
        }
    },
    'generated_by': expected_generated_by
}
140

    
141
# Expected 'source/hello.json' document; the ZIP archive's hash is only known
# once the archive has been produced, so it gets filled in during the test.
expected_source_description = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': [{
        'file': 'report.spdx',
        'sha256': sha256_hashes['report.spdx']
    }, {
        'file': 'LICENSES/CC0-1.0.txt',
        'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
    }],
    'source_archives': {
        'zip': {
            'sha256': '!!!!value to fill during test!!!!',
        }
    },
    'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
    'definitions': [{
        'type': 'resource',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }, {
        'type': 'resource',
        'identifier': 'hello-message',
        'long_name': 'Hello Message',
        'version': [2021, 11, 10],
    }, {
        'type': 'mapping',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }],
    'generated_by': expected_generated_by
}

# All expected output documents in one list — handy for variant makers that
# need to update every produced JSON document at once.
expected = [*expected_resources, expected_mapping, expected_source_description]
177

    
178
def run_reuse(command, **kwargs):
    """
    Instead of running a 'reuse' command, check if 'mock_reuse_missing' file
    exists under root directory. If yes, raise FileNotFoundError as if 'reuse'
    command was missing. If not, check if 'README.txt.license' file exists
    in the requested directory and return zero if it does.
    """
    subcommand = 'lint' if 'lint' in command else 'spdx'
    expected = ['reuse', '--root', '<root>', subcommand]

    root = Path(process_command(command, expected)['root'])

    # Marker file simulates an absent 'reuse' executable.
    if (root / 'mock_reuse_missing').exists():
        raise FileNotFoundError('dummy')

    compliant = (root / 'README.txt.license').exists()

    return MockedCompletedProcess(command, 0 if compliant else 1,
                                  stdout=f'dummy {subcommand} output',
                                  text_output=kwargs.get('text'))
198

    
199
# Paths of foreign package archives that the mocked piggybacked system
# pretends to have downloaded.
mocked_piggybacked_archives = [
    PurePosixPath('apt/something.deb'),
    PurePosixPath('apt/something.orig.tar.gz'),
    PurePosixPath('apt/something.debian.tar.xz'),
    PurePosixPath('othersystem/other-something.tar.gz')
]
205

    
206
@pytest.fixture
def mock_piggybacked_apt_system(monkeypatch):
    """Make local_apt.piggybacked_system() return a mocked result."""
    # We set 'td' to a temporary dir path further below.
    td = None

    class MockedPiggybacked:
        """Minimal mock of Piggybacked object."""
        # License files the piggybacked packages are claimed to provide.
        package_license_files = [PurePosixPath('.apt-root/.../copyright')]
        # Dependency every generated resource must declare.
        package_must_depend = [{'identifier': 'apt-common-licenses'}]

        # NOTE: the two functions below are used without 'self'; 'td' is
        # looked up in the enclosing fixture's scope at call time.
        def resolve_file(path):
            """
            For each path that starts with '.apt-root' return a valid
            dummy file path.
            """
            if path.parts[0] != '.apt-root':
                return None

            (td / path.name).write_text(f'dummy {path.name}')

            return (td / path.name)

        def archive_files():
            """Yield some valid dummy file path tuples."""
            for desired_path in mocked_piggybacked_archives:
                real_path = td / desired_path.name
                real_path.write_text(f'dummy {desired_path.name}')

                yield desired_path, real_path

    @contextmanager
    def mocked_piggybacked_system(piggyback_def, piggyback_files):
        """Mock the execution of local_apt.piggybacked_system()."""
        # The builder must pass through the piggybacking definition verbatim.
        assert piggyback_def == {
            'system': 'apt',
            'distribution': 'nabia',
            'packages': ['somelib=1.0'],
            'dependencies': False
        }
        # If a pre-downloaded archives dir was given, it must contain exactly
        # the mocked archive files (compared by relative POSIX paths).
        if piggyback_files is not None:
            assert {str(path) for path in mocked_piggybacked_archives} == \
                {path.relative_to(piggyback_files).as_posix()
                 for path in piggyback_files.rglob('*') if path.is_file()}

        yield MockedPiggybacked

    monkeypatch.setattr(local_apt, 'piggybacked_system',
                        mocked_piggybacked_system)

    # Rebind 'td' to a Path so the closures above can create dummy files in it.
    with TemporaryDirectory() as td:
        td = Path(td)
        yield
259

    
260
@pytest.fixture
def sample_source():
    """Prepare a directory with sample Haketilo source package."""
    with TemporaryDirectory() as td:
        srcdir = Path(td) / 'hello'

        # Materialize every source file, creating parent dirs as needed.
        for name, contents in src_files.items():
            file_path = srcdir / name
            file_path.parent.mkdir(parents=True, exist_ok=True)
            file_path.write_bytes(contents)

        yield srcdir
271

    
272
# Registry of functions that derive (correct) test variants of the sample
# source package.
variant_makers = []

def variant_maker(func):
    """Decorator: register func in the variant_makers list and return it."""
    variant_makers.append(func)
    return func
277

    
278
@variant_maker
def sample_source_change_index_json(monkeypatch, sample_source):
    """
    Return a non-standard path for index.json. Ensure parent directories exist.
    """
    # Use a path under sample_source so that it gets auto-deleted after the
    # test. Use a file under .git because .git is ignored by REUSE.
    replacement = sample_source / '.git' / 'replacement.json'
    replacement.parent.mkdir()
    return replacement
288

    
289
@variant_maker
def sample_source_add_comments(monkeypatch, sample_source):
    """Add index.json comments that should be preserved."""
    # Top-level 'comment' property in both the input index.json and the
    # expected produced source description.
    for dictionary in (index_obj, expected_source_description):
        monkeypatch.setitem(dictionary, 'comment', 'index.json comment')

    # 'comment' property in every definition and in the matching expected
    # document.  (The previously unused enumerate() index was dropped.)
    for dicts in zip(index_obj['definitions'], expected):
        for dictionary in dicts:
            monkeypatch.setitem(dictionary, 'comment', 'index.json comment')
298

    
299
@variant_maker
def sample_source_remove_spdx(monkeypatch, sample_source):
    """Remove spdx report generation."""
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')

    # Drop report.spdx from the input 'copyright' list and from every expected
    # document's 'source_copyright' list.
    for obj, key in [
            (index_obj, 'copyright'),
            *((definition, 'source_copyright') for definition in expected)
    ]:
        new_list = [r for r in obj[key] if r['file'] != 'report.spdx']
        monkeypatch.setitem(obj, key, new_list)

    monkeypatch.delitem(dist_files, 'report.spdx')

    # To verify that reuse does not get called now, make mocked subprocess.run()
    # raise an error if called.
    (sample_source / 'mock_reuse_missing').touch()
316

    
317
@variant_maker
def sample_source_remove_additional_files(monkeypatch, sample_source):
    """Use default value ([]) for 'additional_files' property."""
    monkeypatch.delitem(index_obj, 'additional_files')

    # These files were only part of the source package through
    # 'additional_files'.
    for name in 'README.txt', 'README.txt.license', '.reuse/dep5':
        monkeypatch.delitem(src_files, name)
324

    
325
@variant_maker
def sample_source_remove_script(monkeypatch, sample_source):
    """Use default value ([]) for 'scripts' property in one of the resources."""
    monkeypatch.delitem(index_obj['definitions'][1], 'scripts')

    monkeypatch.setitem(expected_resources[1], 'scripts', [])

    # message.js was only referenced through the removed 'scripts' entry.
    for files in dist_files, src_files:
        monkeypatch.delitem(files, 'message.js')
334

    
335
@variant_maker
def sample_source_remove_payloads(monkeypatch, sample_source):
    """Use default value ({}) for 'payloads' property in mapping."""
    monkeypatch.delitem(index_obj['definitions'][2], 'payloads')

    monkeypatch.setitem(expected_mapping, 'payloads', {})
341

    
342
@variant_maker
def sample_source_remove_uuids(monkeypatch, sample_source):
    """Don't use UUIDs (they are optional)."""
    for definition in index_obj['definitions']:
        monkeypatch.delitem(definition, 'uuid')

    # The expected source description has no 'uuid' property, hence the guard.
    for description in expected:
        if 'uuid' in description:
            monkeypatch.delitem(description, 'uuid')
351

    
352
@variant_maker
def sample_source_add_extra_props(monkeypatch, sample_source):
    """Add some unrecognized properties that should be stripped."""
    # Depth-first walk over index_obj's tree of dicts and lists; every dict
    # not yet visited gets a spurious property added.
    to_process = [index_obj]
    while to_process:
        processed = to_process.pop()

        if type(processed) is list:
            to_process.extend(processed)
        elif type(processed) is dict and 'spurious_property' not in processed:
            # NOTE(review): values under 'payloads' are deliberately not
            # descended into — presumably because their keys are URL patterns
            # rather than schema'd objects; confirm.
            to_process.extend(v for k, v in processed.items()
                              if k != 'payloads')
            monkeypatch.setitem(processed, 'spurious_property', 'some_value')
365

    
366
# Relative paths of the dummy foreign package archives used in the
# piggybacking tests.
piggyback_archive_names = [
    'apt/something.deb',
    'apt/something.orig.tar.gz',
    'apt/something.debian.tar.xz',
    'othersystem/other-something.tar.gz'
]
372

    
373
@variant_maker
def sample_source_add_piggyback(monkeypatch, sample_source,
                                extra_build_args=None):
    """
    Add piggybacked foreign system packages.

    extra_build_args, if given, is a dict of extra keyword arguments to pass
    to every build.Build() instantiation.
    """
    # Avoid the mutable-default-argument pitfall; None means "no extra args".
    extra_build_args = {} if extra_build_args is None else extra_build_args

    # Wrap build.Build so the extra keyword arguments get forwarded.
    old_build = build.Build
    new_build = lambda *a, **kwa: old_build(*a, **kwa, **extra_build_args)
    monkeypatch.setattr(build, 'Build', new_build)

    # Declare piggybacking in index.json; this requires schema version 2.
    monkeypatch.setitem(index_obj, 'piggyback_on', {
        'system': 'apt',
        'distribution': 'nabia',
        'packages': ['somelib=1.0'],
        'dependencies': False
    })
    schema = 'https://hydrilla.koszko.org/schemas/package_source-2.schema.json'
    monkeypatch.setitem(index_obj, '$schema', schema)

    # Register the dummy files the mocked piggybacked system provides.
    new_refs = {}
    for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
        contents = f'dummy {PurePosixPath(name).name}'.encode()
        digest = sha256(contents).digest().hex()
        monkeypatch.setitem(dist_files, name, contents)
        monkeypatch.setitem(sha256_hashes, name, digest)
        new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}

    # The piggybacked copyright file gets appended to every expected
    # document's 'source_copyright' list.
    for obj in expected:
        new_list = [*obj['source_copyright'], new_refs['copyright']]
        monkeypatch.setitem(obj, 'source_copyright', new_list)

    # Every resource gains a dependency on the common-licenses pseudo-package.
    for obj in expected_resources:
        new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
        monkeypatch.setitem(obj, 'dependencies', new_list)

    # The piggybacked script gets prepended to the first resource's scripts.
    for obj in index_obj['definitions'][0], expected_resources[0]:
        new_list = [new_refs['script.js'], *obj['scripts']]
        monkeypatch.setitem(obj, 'scripts', new_list)

    # Foreign package archives are expected inside the produced source ZIP.
    for name in piggyback_archive_names:
        path = PurePosixPath('hello.foreign-packages') / name
        monkeypatch.setitem(extra_archive_files, str(path),
                            f'dummy {path.name}'.encode())
414

    
415
def prepare_foreign_packages_dir(path):
    """
    Put some dummy archive in the directory so that it can be passed to
    piggybacked_system().
    """
    for archive_name in piggyback_archive_names:
        target = path / archive_name
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(f'dummy {target.name}')
424

    
425
@variant_maker
def sample_source_add_piggyback_pass_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives (pass their directory to Build() via 'piggyback_files').
    """
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    foreign_packages_dir = sample_source.parent / 'arbitrary-name'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source,
                                {'piggyback_files': foreign_packages_dir})
438

    
439
@variant_maker
def sample_source_add_piggyback_find_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives (have Build() find them in their default directory,
    '<source dir name>.foreign-packages' next to the source dir).
    """
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source)
451

    
452
@variant_maker
def sample_source_add_piggyback_no_download(monkeypatch, sample_source,
                                            pass_directory_to_build=False):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives.
    """
    # Use a dir next to 'sample_source'; have it gc'd by sample_source fixture.
    if pass_directory_to_build:
        foreign_packages_dir = sample_source.parent / 'arbitrary-name'
    else:
        foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    # NOTE(review): when pass_directory_to_build is True, the directory is
    # never actually passed to Build() (no extra_build_args given) — looks
    # like an oversight, though as a variant maker this function is only ever
    # called with the default False; confirm intent.
    sample_source_add_piggyback(monkeypatch, sample_source)
468

    
469
@pytest.fixture(params=[lambda m, s: None, *variant_makers])
def sample_source_make_variants(request, monkeypatch, sample_source,
                                mock_piggybacked_apt_system):
    """
    Prepare a directory with sample Haketilo source package in multiple slightly
    different versions (all correct). Return an index.json path that should be
    used when performing test build.
    """
    # Each param mutates the sample source and/or the module-level expectation
    # objects; a None return means the standard index.json location is used.
    index_path = request.param(monkeypatch, sample_source) or Path('index.json')

    # Serialize the (possibly mutated) index object back into the source dir.
    index_text = json.dumps(index_obj)

    (sample_source / index_path).write_text(index_text)

    # Keep src_files in sync so the source archive check in test_build() works.
    monkeypatch.setitem(src_files, 'index.json', index_text.encode())

    return index_path
486

    
487
@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build(sample_source, sample_source_make_variants, tmpdir):
    """Build the sample source package and verify the produced files."""
    index_json_path = sample_source_make_variants

    # First, build the package
    build.Build(sample_source, index_json_path).write_package_files(tmpdir)

    # Verify directories under destination directory
    assert {'file', 'resource', 'mapping', 'source'} == \
        set([path.name for path in tmpdir.iterdir()])

    # Verify files under 'file/' — each distributable file is stored under its
    # SHA-256 digest.
    file_dir = tmpdir / 'file' / 'sha256'

    for name, contents in dist_files.items():
        dist_file_path = file_dir / sha256_hashes[name]
        assert dist_file_path.is_file()
        assert dist_file_path.read_bytes() == contents

    # No unexpected files should be present.
    assert {p.name for p in file_dir.iterdir()} == \
        {sha256_hashes[name] for name in dist_files.keys()}

    # Verify files under 'resource/'
    resource_dir = tmpdir / 'resource'

    assert {rj['identifier'] for rj in expected_resources} == \
        {path.name for path in resource_dir.iterdir()}

    for resource_json in expected_resources:
        subdir = resource_dir / resource_json['identifier']
        assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]

        assert json.loads((subdir / '2021.11.10').read_text()) == resource_json

        # The produced document must also validate against the API schema.
        hydrilla_util.validator_for('api_resource_description-1.0.1.schema.json')\
                     .validate(resource_json)

    # Verify files under 'mapping/'
    mapping_dir = tmpdir / 'mapping'
    assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]

    subdir = mapping_dir / 'helloapple'
    assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]

    assert json.loads((subdir / '2021.11.10').read_text()) == expected_mapping

    hydrilla_util.validator_for('api_mapping_description-1.0.1.schema.json')\
                 .validate(expected_mapping)

    # Verify files under 'source/'
    source_dir = tmpdir / 'source'
    assert {'hello.json', 'hello.zip'} == \
        {path.name for path in source_dir.iterdir()}

    # The ZIP archive must contain all source files (under 'hello/') plus any
    # extra (e.g. piggybacked) archive entries — and nothing else.
    archive_files = {**dict((f'hello/{name}', contents)
                            for name, contents in src_files.items()),
                     **extra_archive_files}

    with ZipFile(source_dir / 'hello.zip', 'r') as archive:
        # (leftover debug print(archive.namelist()) removed)
        assert len(archive.namelist()) == len(archive_files)

        for name, contents in archive_files.items():
            assert archive.read(name) == contents

    # The archive's hash can only be computed now, after the build produced it.
    zip_ref = expected_source_description['source_archives']['zip']
    zip_contents = (source_dir / 'hello.zip').read_bytes()
    zip_ref['sha256'] = sha256(zip_contents).digest().hex()

    assert json.loads((source_dir / 'hello.json').read_text()) == \
        expected_source_description

    hydrilla_util.validator_for('api_source_description-1.0.1.schema.json')\
                 .validate(expected_source_description)
563

    
564
# Registry of functions that derive broken versions of the sample package.
error_makers = []

def error_maker(function):
    """
    Decorate function by placing it in error_makers array.

    Return the function so the decorated module-level name stays bound to it
    (previously the decorator implicitly returned None, unlike variant_maker).
    """
    error_makers.append(function)
    return function
568

    
569
@error_maker
def sample_source_error_missing_file(monkeypatch, sample_source):
    """
    Modify index.json to expect missing report.spdx file and cause an error.
    """
    # Without this property report.spdx never gets generated, yet it remains
    # referenced in the 'copyright' list.
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
    return FileReferenceError, '^referenced_file_report.spdx_missing$'
576

    
577
@error_maker
def sample_source_error_index_schema(monkeypatch, sample_source):
    """Modify index.json to be incompliant with the schema."""
    monkeypatch.delitem(index_obj, 'definitions')
    # No match regex — any ValidationError is accepted.
    return ValidationError,
582

    
583
@error_maker
def sample_source_error_bad_comment(monkeypatch, sample_source):
    """Modify index.json to have an invalid '/' in it."""
    # The third returned value replaces the index.json text to be written.
    return json.JSONDecodeError, '^bad_comment: .*', \
        json.dumps(index_obj) + '/something\n'
588

    
589
@error_maker
def sample_source_error_bad_json(monkeypatch, sample_source):
    """Modify index.json to not be valid json even after comment stripping."""
    return json.JSONDecodeError, '', json.dumps(index_obj) + '???\n'
593

    
594
@error_maker
def sample_source_error_missing_reuse(monkeypatch, sample_source):
    """Cause mocked reuse process invocation to fail with FileNotFoundError."""
    # run_reuse() raises FileNotFoundError when this marker file exists.
    (sample_source / 'mock_reuse_missing').touch()
    return build.ReuseError, '^couldnt_execute_reuse_is_it_installed$'
599

    
600
@error_maker
def sample_source_error_missing_license(monkeypatch, sample_source):
    """Remove a file to make package REUSE-incompliant."""
    (sample_source / 'README.txt.license').unlink()

    # Expected ReuseError message: the mocked 'reuse lint' fails and its
    # stdout/stderr get embedded in the error text.
    error_regex = """^\
command_reuse --root \\S+ lint_failed

STDOUT_OUTPUT_heading

dummy lint output

STDERR_OUTPUT_heading

some error output\
$\
"""

    return build.ReuseError, error_regex
619

    
620
@error_maker
def sample_source_error_file_outside(monkeypatch, sample_source):
    """Make index.json illegally reference a file outside srcdir."""
    new_list = [*index_obj['copyright'], {'file': '../abc'}]
    monkeypatch.setitem(index_obj, 'copyright', new_list)
    return FileReferenceError, '^path_contains_double_dot_\\.\\./abc$'
626

    
627
@error_maker
def sample_source_error_reference_itself(monkeypatch, sample_source):
    """Make index.json illegally reference index.json."""
    new_list = [*index_obj['copyright'], {'file': 'index.json'}]
    monkeypatch.setitem(index_obj, 'copyright', new_list)
    return FileReferenceError, '^loading_reserved_index_json$'
633

    
634
@error_maker
def sample_source_error_report_excluded(monkeypatch, sample_source):
    """
    Make index.json require generation of report.spdx but don't include it among
    copyright files.
    """
    new_list = [file_ref for file_ref in index_obj['copyright']
                if file_ref['file'] != 'report.spdx']
    monkeypatch.setitem(index_obj, 'copyright', new_list)
    return FileReferenceError, '^report_spdx_not_in_copyright_list$'
644

    
645
@pytest.fixture(params=error_makers)
def sample_source_make_errors(request, monkeypatch, sample_source):
    """
    Prepare a directory with sample Haketilo source package in multiple slightly
    broken versions. Return an error type that should be raised when running
    test build.
    """
    # Error makers return 1-3 values: error type, optional match regex,
    # optional replacement index.json text; pad with '' to unpack all three.
    error_type, error_regex, index_text = \
        [*request.param(monkeypatch, sample_source), '', ''][0:3]

    # Fall back to serializing the (possibly mutated) index object.
    index_text = index_text or json.dumps(index_obj)

    (sample_source / 'index.json').write_text(index_text)

    monkeypatch.setitem(src_files, 'index.json', index_text.encode())

    return error_type, error_regex
662

    
663
@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build_error(tmpdir, sample_source, sample_source_make_errors):
    """Try building the sample source package and verify generated errors."""
    error_type, error_regex = sample_source_make_errors

    destination = Path(tmpdir) / 'dstdir'
    destination.mkdir(exist_ok=True)

    # The build must fail with the error the current error maker promised.
    with pytest.raises(error_type, match=error_regex):
        builder = build.Build(sample_source, Path('index.json'))
        builder.write_package_files(destination)
(4-4/5)