Project

General

Profile

Download (28.7 KB) Statistics
| Branch: | Tag: | Revision:

hydrilla-builder / tests / test_build.py @ 73a443f5

1
# SPDX-License-Identifier: CC0-1.0
2

    
3
# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
4
#
5
# Available under the terms of Creative Commons Zero v1.0 Universal.
6

    
7
# Enable using with Python 3.7.
8
from __future__ import annotations
9

    
10
import pytest
11
import json
12
import shutil
13

    
14
from tempfile import TemporaryDirectory
15
from pathlib import Path, PurePosixPath
16
from hashlib import sha256
17
from zipfile import ZipFile
18
from contextlib import contextmanager
19

    
20
from jsonschema import ValidationError
21

    
22
from hydrilla import util as hydrilla_util
23
from hydrilla.util._util import _schema_name_re
24
from hydrilla.builder import build, _version, local_apt
25
from hydrilla.builder.common_errors import *
26

    
27
from .helpers import *
28

    
29
# Directory containing this test module; used to locate bundled test data.
here = Path(__file__).resolve().parent

# 'generated_by' object the builder is expected to embed in every produced
# JSON document.
expected_generated_by = {
    'name': 'hydrilla.builder',
    'version': _version.version
}

# Pristine sample source package shipped alongside the tests.
orig_srcdir = here / 'source-package-example'

# Parsed index.json of the sample package (second return value, the text
# form, is not needed here).
index_obj, _ = hydrilla_util.load_instance_from_file(orig_srcdir / 'index.json')
39

    
40
def read_files(*file_list):
    """
    Read each named file under the sample source directory and return a dict
    mapping every name to that file's raw bytes.
    """
    return {name: (orig_srcdir / name).read_bytes() for name in file_list}
46

    
47
# Files expected to appear in the built package's 'file/' tree.
dist_files = {
    **read_files('LICENSES/CC0-1.0.txt', 'bye.js', 'hello.js', 'message.js'),
    'report.spdx': b'dummy spdx output'
}
# Files expected inside the source ZIP archive: the distributed files plus
# licensing/metadata files.
src_files = {
    **dist_files,
    **read_files('README.txt', 'README.txt.license', '.reuse/dep5',
                 'index.json')
}
# Additional entries expected in the source archive; populated by variant
# makers that piggyback foreign system packages.
extra_archive_files = {
}

# Hex-encoded SHA-256 digests of all the files above, keyed by file name.
sha256_hashes = dict((name, sha256(contents).digest().hex())
                     for name, contents in src_files.items())

# report.spdx is generated during the build, so it is not a source file.
del src_files['report.spdx']
63

    
64
# 'source_copyright' array expected in every produced item description.
expected_source_copyright = [{
    'file': 'report.spdx',
    'sha256': sha256_hashes['report.spdx']
}, {
    'file': 'LICENSES/CC0-1.0.txt',
    'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
}]
71

    
72
# Resource descriptions the builder is expected to emit for the two resources
# ('helloapple' and 'hello-message') defined by the sample package.
expected_resources = [{
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'type': 'resource',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
    'version': [2021, 11, 10],
    'revision': 1,
    'description': 'greets an apple',
    'dependencies': [{'identifier': 'hello-message'}],
    'scripts': [{
        'file': 'hello.js',
        'sha256': sha256_hashes['hello.js']
    }, {
        'file': 'bye.js',
        'sha256': sha256_hashes['bye.js']
    }],
    'generated_by': expected_generated_by
}, {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'type': 'resource',
    'identifier': 'hello-message',
    'long_name': 'Hello Message',
    'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
    'version': [2021, 11, 10],
    'revision': 2,
    'description': 'define messages for saying hello and bye',
    'dependencies': [],
    'scripts': [{
        'file': 'message.js',
        'sha256': sha256_hashes['message.js']
    }],
    'generated_by': expected_generated_by
}]
110

    
111
# Mapping description the builder is expected to emit for the sample package's
# single 'helloapple' mapping.
expected_mapping = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'type': 'mapping',
    'identifier': 'helloapple',
    'long_name': 'Hello Apple',
    'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
    'version': [2021, 11, 10],
    'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
    'payloads': {
        'https://hydrillabugs.koszko.org/***': {
            'identifier': 'helloapple'
        },
        'https://hachettebugs.koszko.org/***': {
            'identifier': 'helloapple'
        }
    },
    'generated_by': expected_generated_by
}
131

    
132
# Source description document the builder is expected to emit.  The archive
# hash depends on ZIP contents, so it is a placeholder computed inside
# test_build() before comparison.
expected_source_description = {
    '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
    'source_name': 'hello',
    'source_copyright': expected_source_copyright,
    'source_archives': {
        'zip': {
            'sha256': '!!!!value to fill during test!!!!',
        }
    },
    'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
    'definitions': [{
        'type': 'mapping',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }, {
        'type': 'resource',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
    }, {
        'type':       'resource',
        'identifier': 'hello-message',
        'long_name': 'Hello Message',
        'version':     [2021, 11, 10],
    }],
    'generated_by': expected_generated_by
}
160

    
161
expected = [expected_mapping, *expected_resources, expected_source_description]
162
expected_items = expected[:3]
163

    
164
def run_reuse(command, **kwargs):
    """
    Mock of a 'reuse' tool invocation.  If a 'mock_reuse_missing' file exists
    under the root directory, raise FileNotFoundError as if the 'reuse'
    executable were absent.  Otherwise report success (exit status 0) exactly
    when 'README.txt.license' is present in the requested directory.
    """
    subcommand = 'lint' if 'lint' in command else 'spdx'
    expected = ['reuse', '--root', '<root>', subcommand]

    matched = process_command(command, expected)
    root_path = Path(matched['root'])

    if (root_path / 'mock_reuse_missing').exists():
        raise FileNotFoundError('dummy')

    compliant = (root_path / 'README.txt.license').exists()
    exit_status = 0 if compliant else 1

    return MockedCompletedProcess(command, exit_status,
                                  stdout=f'dummy {subcommand} output',
                                  text_output=kwargs.get('text'))
184

    
185
# Relative archive paths the mocked Piggybacked object pretends to provide.
mocked_piggybacked_archives = [
    PurePosixPath('apt/something.deb'),
    PurePosixPath('apt/something.orig.tar.gz'),
    PurePosixPath('apt/something.debian.tar.xz'),
    PurePosixPath('othersystem/other-something.tar.gz')
]
191

    
192
@pytest.fixture
def mock_piggybacked_apt_system(monkeypatch):
    """Make local_apt.piggybacked_system() return a mocked result."""
    # We set 'td' to a temporary dir path further below.  The class methods
    # below close over this local, so rebinding it later is visible to them.
    td = None

    class MockedPiggybacked:
        """Minimal mock of Piggybacked object."""
        # NOTE: resolve_file() and archive_files() intentionally take no
        # 'self' — they are called on the class object itself, not on
        # instances.
        package_license_files = [PurePosixPath('.apt-root/.../copyright')]
        resource_must_depend = [{'identifier': 'apt-common-licenses'}]

        def resolve_file(path):
            """
            For each path that starts with '.apt-root' return a valid dummy file
            path.
            """
            if path.parts[0] != '.apt-root':
                return None

            # Materialize a dummy file under the temporary dir so callers can
            # actually read it.
            (td / path.name).write_text(f'dummy {path.name}')

            return (td / path.name)

        def archive_files():
            """Yield some valid dummy file path tuples."""
            for desired_path in mocked_piggybacked_archives:
                real_path = td / desired_path.name
                real_path.write_text(f'dummy {desired_path.name}')

                yield desired_path, real_path

    @contextmanager
    def mocked_piggybacked_system(piggyback_def, piggyback_files):
        """Mock the execution of local_apt.piggybacked_system()."""
        # The sample package's 'piggyback_on' object must arrive unchanged.
        assert piggyback_def == {
            'system': 'apt',
            'distribution': 'nabia',
            'packages': ['somelib=1.0'],
            'dependencies': False
        }
        # When pre-downloaded archives are passed, verify the directory holds
        # exactly the expected relative archive paths.
        if piggyback_files is not None:
            assert {str(path) for path in mocked_piggybacked_archives} == \
                {path.relative_to(piggyback_files).as_posix()
                 for path in piggyback_files.rglob('*') if path.is_file()}

        yield MockedPiggybacked

    monkeypatch.setattr(local_apt, 'piggybacked_system',
                        mocked_piggybacked_system)

    # Rebind 'td' (seen by the closures above) to a live temporary directory
    # for the duration of the test.
    with TemporaryDirectory() as td:
        td = Path(td)
        yield
245

    
246
@pytest.fixture
def sample_source():
    """Yield a temporary directory populated with the sample source package."""
    with TemporaryDirectory() as tmpdir:
        srcdir = Path(tmpdir) / 'hello'
        for name, contents in src_files.items():
            file_path = srcdir / name
            file_path.parent.mkdir(parents=True, exist_ok=True)
            file_path.write_bytes(contents)

        yield srcdir
257

    
258
# Registry of functions that each produce one correct variant of the sample
# package.
variant_makers = []

def variant_maker(function):
    """Register function in the variant_makers list and return it unchanged."""
    variant_makers.append(function)
    return function
263

    
264
@variant_maker
def sample_source_change_index_json(monkeypatch, sample_source):
    """
    Return a non-standard path for index.json. Ensure parent directories exist.
    """
    # Keep the replacement inside sample_source so the fixture cleans it up;
    # '.git' is chosen because REUSE ignores that directory.
    replacement = sample_source / '.git' / 'replacement.json'
    replacement.parent.mkdir()
    return replacement
274

    
275
@variant_maker
def sample_source_add_comments(monkeypatch, sample_source):
    """Add index.json comments that should be preserved."""
    targets = [index_obj, *index_obj['definitions'], *expected]
    for target in targets:
        monkeypatch.setitem(target, 'comment', 'index.json comment')
280

    
281
@variant_maker
def sample_source_remove_spdx(monkeypatch, sample_source):
    """Remove spdx report generation."""
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')

    # Drop the report.spdx reference both from the input index.json...
    kept_in = [ref for ref in index_obj['copyright']
               if ref['file'] != 'report.spdx']
    monkeypatch.setitem(index_obj, 'copyright', kept_in)

    # ...and from every expected output document.
    kept_out = [ref for ref in expected_source_copyright
                if ref['file'] != 'report.spdx']
    for description in expected:
        monkeypatch.setitem(description, 'source_copyright', kept_out)

    monkeypatch.delitem(dist_files, 'report.spdx')

    # Reuse must not get invoked in this variant; this marker file makes the
    # mocked subprocess.run() raise an error if it is.
    (sample_source / 'mock_reuse_missing').touch()
299

    
300
@variant_maker
def sample_source_remove_additional_files(monkeypatch, sample_source):
    """Use default value ([]) for 'additional_files' property."""
    monkeypatch.delitem(index_obj, 'additional_files')

    # Without 'additional_files', these files are no longer part of the
    # source archive.
    for name in 'README.txt', 'README.txt.license', '.reuse/dep5':
        monkeypatch.delitem(src_files, name)
307

    
308
@variant_maker
def sample_source_remove_script(monkeypatch, sample_source):
    """Use default value ([]) for 'scripts' property in one of the resources."""
    # Drop the sole script of the 'hello-message' resource definition and
    # adjust expectations accordingly.
    monkeypatch.delitem(index_obj['definitions'][2], 'scripts')
    monkeypatch.setitem(expected_resources[1], 'scripts', [])

    # 'message.js' is then neither distributed nor part of the archive.
    monkeypatch.delitem(dist_files, 'message.js')
    monkeypatch.delitem(src_files, 'message.js')
317

    
318
@variant_maker
def sample_source_remove_payloads(monkeypatch, sample_source):
    """Use default value ({}) for 'payloads' property in mapping."""
    mapping_def = index_obj['definitions'][0]
    monkeypatch.delitem(mapping_def, 'payloads')

    monkeypatch.setitem(expected_mapping, 'payloads', {})
324

    
325
@variant_maker
def sample_source_remove_uuids(monkeypatch, sample_source):
    """Don't use UUIDs (they are optional)."""
    for item_def in index_obj['definitions']:
        monkeypatch.delitem(item_def, 'uuid')

    # Only item descriptions carry UUIDs; skip documents without one.
    uuid_holders = [doc for doc in expected if 'uuid' in doc]
    for doc in uuid_holders:
        monkeypatch.delitem(doc, 'uuid')
334

    
335
@variant_maker
def sample_source_add_extra_props(monkeypatch, sample_source):
    """Add some unrecognized properties that should be stripped."""
    # Walk index_obj depth-first, tagging every not-yet-tagged dict.
    stack = [index_obj]
    while stack:
        node = stack.pop()

        if type(node) is list:
            stack.extend(node)
        elif type(node) is dict and 'spurious_property' not in node:
            # 'payloads' keys are URL patterns; leave that subtree alone.
            stack.extend(value for key, value in node.items()
                         if key != 'payloads')
            monkeypatch.setitem(node, 'spurious_property', 'some_value')
348

    
349
@variant_maker
def sample_source_make_version_2(monkeypatch, sample_source,
                                 expected_documents_to_modify=()):
    """
    Increase sources' schema version from 1 to 2.

    Also bump '$schema' of each document in expected_documents_to_modify so
    that expectations match output produced under the new schema version.
    """
    # Immutable default instead of the old mutable []; the argument is only
    # iterated, so callers passing lists keep working unchanged.
    for obj in index_obj, *expected_documents_to_modify:
        # The only '1' in these schema URLs is the version digit, so a plain
        # replace() is safe here.
        monkeypatch.setitem(obj, '$schema', obj['$schema'].replace('1', '2'))
355

    
356
@variant_maker
def sample_source_cors_bypass_ignored(monkeypatch, sample_source, value=True):
    """
    Specify CORS bypass permissions in sources, but keep sources' schema
    version at 1 (where the property is not recognized).
    """
    for item_def in index_obj['definitions']:
        monkeypatch.setitem(item_def, 'permissions', {'cors_bypass': value})
364

    
365
@variant_maker
def sample_source_cors_bypass(monkeypatch, sample_source):
    """Specify CORS bypass permissions in sources."""
    # Same input change as the '_ignored' variant, but with schema version 2
    # the permissions must show up in the produced item descriptions.
    sample_source_cors_bypass_ignored(monkeypatch, sample_source, value=True)
    sample_source_make_version_2(monkeypatch, sample_source, expected_items)

    for description in expected_items:
        monkeypatch.setitem(description, 'permissions', {'cors_bypass': True})
373

    
374
@variant_maker
def sample_source_cors_bypass_defaults(monkeypatch, sample_source):
    """
    Specify CORS bypass permissions in sources but use the default value
    ("False"), so nothing extra appears in the outputs.
    """
    sample_source_cors_bypass_ignored(monkeypatch, sample_source, value=False)
    sample_source_make_version_2(monkeypatch, sample_source)
382

    
383
@variant_maker
def sample_source_req_mappings_ignored(monkeypatch, sample_source,
                                       value=[{'identifier': 'mapping-dep'}]):
    """
    Specify dependencies on mappings, but keep sources' schema version at 1
    (where 'required_mappings' is not recognized).

    The default list is never mutated, only stored into index_obj, so sharing
    it across calls is safe.
    """
    for definition in index_obj['definitions']:
        # (Stray trailing semicolon removed.)
        monkeypatch.setitem(definition, 'required_mappings', value)
391

    
392
@variant_maker
def sample_source_req_mappings(monkeypatch, sample_source):
    """Specify dependencies on mappings."""
    # With schema version 2 the dependency must also appear in the produced
    # item descriptions.
    sample_source_req_mappings_ignored(monkeypatch, sample_source)
    sample_source_make_version_2(monkeypatch, sample_source, expected_items)

    required = [{'identifier': 'mapping-dep'}]
    for description in expected_items:
        monkeypatch.setitem(description, 'required_mappings', required)
401

    
402
@variant_maker
def sample_source_req_mappings_defaults(monkeypatch, sample_source):
    """Specify dependencies of a mapping, but use the default value ("[]")."""
    sample_source_req_mappings_ignored(monkeypatch, sample_source, value=[])
    sample_source_make_version_2(monkeypatch, sample_source)
407

    
408
@variant_maker
def sample_source_combined_def(monkeypatch, sample_source):
    """Define mapping and resource together."""
    # 'mapping_and_resource' requires source schema version 2.
    sample_source_make_version_2(monkeypatch, sample_source)

    mapping_def   = index_obj['definitions'][0]
    resource_defs = index_obj['definitions'][1:3]

    # The first resource definition is folded into the mapping definition, so
    # only two definitions remain in index.json.
    item_defs_shortened = [mapping_def, resource_defs[1]]
    monkeypatch.setitem(index_obj, 'definitions', item_defs_shortened)

    monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')

    # A combined definition carries a revision; the expected mapping version
    # gains the revision as an extra component.
    new_mapping_ver = [*expected_mapping['version'], 1]
    monkeypatch.setitem(mapping_def, 'revision', 1)
    monkeypatch.setitem(expected_mapping, 'version', new_mapping_ver)

    # Graft the folded resource's scripts/dependencies onto the combined
    # definition.
    for prop in 'scripts', 'dependencies':
        monkeypatch.setitem(mapping_def, prop, resource_defs[0][prop])

    # The generated resource inherits uuid and description from the combined
    # definition.
    monkeypatch.setitem(expected_resources[0], 'uuid', mapping_def['uuid'])
    monkeypatch.setitem(expected_resources[0], 'description',
                        mapping_def['description'])

    monkeypatch.setitem(expected_source_description['definitions'][0],
                        'version', new_mapping_ver)
434

    
435
@variant_maker
def sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
                                              min_ver=[1, 2], max_ver=[1, 2]):
    """
    Specify version constraints on Haketilo, but keep sources' schema version
    at 1 (where the constraints are not recognized).
    """
    mapping = index_obj['definitions'][0]
    for key, ver in ('min_haketilo_version', min_ver), \
                    ('max_haketilo_version', max_ver):
        monkeypatch.setitem(mapping, key, ver)
445

    
446
@variant_maker
def sample_source_minmax_haketilo_ver(monkeypatch, sample_source):
    """Specify version constraints on Haketilo."""
    sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source)
    sample_source_make_version_2(monkeypatch, sample_source, [expected_mapping])

    # Under schema version 2 the constraints must appear in the produced
    # mapping description.
    for key in 'min_haketilo_version', 'max_haketilo_version':
        monkeypatch.setitem(expected_mapping, key, [1, 2])
454

    
455
@variant_maker
def sample_source_minmax_haketilo_ver_default(monkeypatch, sample_source):
    """Specify version constraints on Haketilo, but use default values."""
    sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
                                              min_ver=[1], max_ver=[65536])
    sample_source_make_version_2(monkeypatch, sample_source)
461

    
462
# Relative names of the dummy foreign-package archives used by the
# piggybacking variants (mirrors mocked_piggybacked_archives as strings).
piggyback_archive_names = [
    'apt/something.deb',
    'apt/something.orig.tar.gz',
    'apt/something.debian.tar.xz',
    'othersystem/other-something.tar.gz'
]
468

    
469
@variant_maker
def sample_source_add_piggyback_ignored(monkeypatch, sample_source,
                                        extra_build_args={}):
    """
    Add piggybacked foreign system packages, but keep sources' schema version
    at 1 (where 'piggyback_on' is not recognized).
    """
    # Wrap build.Build so any extra keyword arguments get appended to every
    # construction performed by the test.
    original_build = build.Build
    def wrapped_build(*args, **kwargs):
        return original_build(*args, **kwargs, **extra_build_args)
    monkeypatch.setattr(build, 'Build', wrapped_build)

    piggyback_def = {
        'system': 'apt',
        'distribution': 'nabia',
        'packages': ['somelib=1.0'],
        'dependencies': False
    }
    monkeypatch.setitem(index_obj, 'piggyback_on', piggyback_def)
486

    
487
@variant_maker
def sample_source_add_piggyback(monkeypatch, sample_source,
                                extra_build_args={}):
    """Add piggybacked foreign system packages."""
    sample_source_add_piggyback_ignored\
        (monkeypatch, sample_source, extra_build_args)

    # 'piggyback_on' is only honored with schema version 2.
    sample_source_make_version_2(monkeypatch, sample_source)

    # Register the dummy files the mocked Piggybacked object resolves and
    # remember their references (file name -> {'file', 'sha256'}).
    new_refs = {}
    for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
        contents = f'dummy {PurePosixPath(name).name}'.encode()
        digest = sha256(contents).digest().hex()
        monkeypatch.setitem(dist_files, name, contents)
        monkeypatch.setitem(sha256_hashes, name, digest)
        new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}

    # The piggybacked package's copyright file joins every document's
    # 'source_copyright' array.
    new_list = [*expected_source_copyright, new_refs['copyright']]
    for obj in expected:
        monkeypatch.setitem(obj, 'source_copyright', new_list)

    # Every resource gains the mocked 'resource_must_depend' dependency.
    for obj in expected_resources:
        new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
        monkeypatch.setitem(obj, 'dependencies', new_list)

    # The piggybacked script is prepended to the first resource's scripts,
    # both in the input definition and in the expected output.
    for obj in index_obj['definitions'][1], expected_resources[0]:
        new_list = [new_refs['script.js'], *obj['scripts']]
        monkeypatch.setitem(obj, 'scripts', new_list)

    # Foreign package archives are expected inside the source archive under
    # 'hello.foreign-packages/'.
    for name in piggyback_archive_names:
        path = PurePosixPath('hello.foreign-packages') / name
        monkeypatch.setitem(extra_archive_files, str(path),
                            f'dummy {path.name}'.encode())
520

    
521
def prepare_foreign_packages_dir(path):
    """
    Populate the directory with dummy archives so that it can be passed to
    piggybacked_system().
    """
    for name in piggyback_archive_names:
        target = path / name
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(f'dummy {target.name}')
530

    
531
@variant_maker
def sample_source_add_piggyback_pass_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives (specify their directory as argument to Build()).
    """
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    # The arbitrary name works because the path is passed to Build()
    # explicitly via the 'piggyback_files' argument below.
    foreign_packages_dir = sample_source.parent / 'arbitrary-name'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source,
                                {'piggyback_files': foreign_packages_dir})
544

    
545
@variant_maker
def sample_source_add_piggyback_find_archives(monkeypatch, sample_source):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives (have Build() find them in their default directory).
    """
    # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
    # '<source dir name>.foreign-packages' is the location Build() checks by
    # default, so no extra argument is passed.
    foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    sample_source_add_piggyback(monkeypatch, sample_source)
557

    
558
@variant_maker
def sample_source_add_piggyback_no_download(monkeypatch, sample_source,
                                            pass_directory_to_build=False):
    """
    Add piggybacked foreign system packages, use pre-downloaded foreign package
    archives.
    """
    # Use a dir next to 'sample_source'; have it gc'd by sample_source fixture.
    if pass_directory_to_build:
        foreign_packages_dir = sample_source.parent / 'arbitrary-name'
    else:
        foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'

    prepare_foreign_packages_dir(foreign_packages_dir)

    # NOTE(review): when pass_directory_to_build is True, the directory is
    # never actually forwarded to Build() (no 'piggyback_files' extra build
    # arg is passed), so that branch would leave the archives undiscovered —
    # confirm whether this parameter is vestigial.
    sample_source_add_piggyback(monkeypatch, sample_source)
574

    
575
@pytest.fixture(params=[lambda m, s: None, *variant_makers])
def sample_source_make_variants(request, monkeypatch, sample_source,
                                mock_piggybacked_apt_system):
    """
    Prepare a directory with sample Haketilo source package in multiple slightly
    different versions (all correct). Return an index.json path that should be
    used when performing test build.
    """
    # The first param is a no-op maker producing the unmodified package.
    custom_path = request.param(monkeypatch, sample_source)
    index_path = custom_path if custom_path else Path('index.json')

    serialized_index = json.dumps(index_obj)
    (sample_source / index_path).write_text(serialized_index)
    monkeypatch.setitem(src_files, 'index.json', serialized_index.encode())

    return index_path
592

    
593
def try_validate(as_what, instance):
    """
    Validate instance against the JSON schema matching its declared '$schema'
    major version; raise only if validation fails.
    """
    exact_versions = {'1': '1.0.1', '2': '2'}
    major = _schema_name_re.search(instance['$schema']).group('major')
    filename = f'{as_what}-{exact_versions[major]}.schema.json'
    validator = hydrilla_util.validator_for(filename)
    validator.validate(instance)
602

    
603
@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build(sample_source, sample_source_make_variants, tmpdir):
    """Build the sample source package and verify the produced files."""
    index_json_path = sample_source_make_variants

    # First, build the package
    build.Build(sample_source, index_json_path).write_package_files(tmpdir)

    # Verify directories under destination directory
    assert {'file', 'resource', 'mapping', 'source'} == \
        {path.name for path in tmpdir.iterdir()}

    # Verify files under 'file/'
    file_dir = tmpdir / 'file' / 'sha256'

    for name, contents in dist_files.items():
        dist_file_path = file_dir / sha256_hashes[name]
        assert dist_file_path.is_file()
        assert dist_file_path.read_bytes() == contents

    # No unexpected files may be present.
    assert {p.name for p in file_dir.iterdir()} == \
        {sha256_hashes[name] for name in dist_files}

    # Verify files under 'resource/'
    resource_dir = tmpdir / 'resource'

    assert {rj['identifier'] for rj in expected_resources} == \
        {path.name for path in resource_dir.iterdir()}

    for resource_json in expected_resources:
        subdir = resource_dir / resource_json['identifier']
        ver_str = hydrilla_util.version_string(resource_json['version'])
        assert [ver_str] == [path.name for path in subdir.iterdir()]

        assert json.loads((subdir / ver_str).read_text()) == resource_json

        try_validate('api_resource_description', resource_json)

    # Verify files under 'mapping/'
    mapping_dir = tmpdir / 'mapping'
    assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]

    subdir = mapping_dir / 'helloapple'

    ver_str = hydrilla_util.version_string(expected_mapping['version'])
    assert [ver_str] == [path.name for path in subdir.iterdir()]

    assert json.loads((subdir / ver_str).read_text()) == expected_mapping

    try_validate('api_mapping_description', expected_mapping)

    # Verify files under 'source/'
    source_dir = tmpdir / 'source'
    assert {'hello.json', 'hello.zip'} == \
        {path.name for path in source_dir.iterdir()}

    # Expected archive contents: all source files under 'hello/' plus any
    # extra entries registered by the active variant maker.
    archive_files = {**dict((f'hello/{name}', contents)
                            for name, contents in src_files.items()),
                     **extra_archive_files}

    with ZipFile(source_dir / 'hello.zip', 'r') as archive:
        # (Leftover debug print of the name list removed.)
        assert len(archive.namelist()) == len(archive_files)

        for name, contents in archive_files.items():
            assert archive.read(name) == contents

    # Fill in the archive hash placeholder before comparing the source
    # description document.
    zip_ref = expected_source_description['source_archives']['zip']
    zip_contents = (source_dir / 'hello.zip').read_bytes()
    zip_ref['sha256'] = sha256(zip_contents).digest().hex()

    assert json.loads((source_dir / 'hello.json').read_text()) == \
        expected_source_description

    try_validate('api_source_description', expected_source_description)
679

    
680
# Registry of functions that each produce one broken variant of the sample
# package.
error_makers = []

def error_maker(function):
    """Decorate function by placing it in error_makers array."""
    error_makers.append(function)
    # Return the function (previously missing) so decorated names don't get
    # rebound to None at module level — consistent with variant_maker above.
    return function
684

    
685
@error_maker
def sample_source_error_missing_file(monkeypatch, sample_source):
    """
    Modify index.json to expect missing report.spdx file and cause an error.
    """
    monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
    expected_regex = '^referenced_file_report.spdx_missing$'
    return FileReferenceError, expected_regex
692

    
693
@error_maker
def sample_source_error_index_schema(monkeypatch, sample_source):
    """Modify index.json to be incompliant with the schema."""
    # 'definitions' is a required property; removing it must fail validation.
    monkeypatch.delitem(index_obj, 'definitions')
    return (ValidationError,)
698

    
699
@error_maker
def sample_source_error_bad_comment(monkeypatch, sample_source):
    """Modify index.json to have an invalid '/' in it."""
    broken_text = json.dumps(index_obj) + '/something\n'
    return json.JSONDecodeError, '^bad_comment: .*', broken_text
704

    
705
@error_maker
def sample_source_error_bad_json(monkeypatch, sample_source):
    """Modify index.json to not be valid json even after comment stripping."""
    broken_text = json.dumps(index_obj) + '???\n'
    return json.JSONDecodeError, '', broken_text
709

    
710
@error_maker
def sample_source_error_missing_reuse(monkeypatch, sample_source):
    """Cause mocked reuse process invocation to fail with FileNotFoundError."""
    marker = sample_source / 'mock_reuse_missing'
    marker.touch()
    return build.ReuseError, '^couldnt_execute_reuse_is_it_installed$'
715

    
716
@error_maker
def sample_source_error_missing_license(monkeypatch, sample_source):
    """Remove a file to make package REUSE-incompliant."""
    (sample_source / 'README.txt.license').unlink()

    # Must match the full error message produced when the mocked
    # 'reuse lint' (see run_reuse() above) exits non-zero.
    error_regex = """^\
command_reuse --root \\S+ lint_failed

STDOUT_OUTPUT_heading

dummy lint output

STDERR_OUTPUT_heading

some error output\
$\
"""

    return build.ReuseError, error_regex
735

    
736
@error_maker
def sample_source_error_file_outside(monkeypatch, sample_source):
    """Make index.json illegally reference a file outside srcdir."""
    extended = [*index_obj['copyright'], {'file': '../abc'}]
    monkeypatch.setitem(index_obj, 'copyright', extended)
    return FileReferenceError, '^path_contains_double_dot_\\.\\./abc$'
742

    
743
@error_maker
def sample_source_error_reference_itself(monkeypatch, sample_source):
    """Make index.json illegally reference index.json."""
    extended = [*index_obj['copyright'], {'file': 'index.json'}]
    monkeypatch.setitem(index_obj, 'copyright', extended)
    return FileReferenceError, '^loading_reserved_index_json$'
749

    
750
@error_maker
def sample_source_error_report_excluded(monkeypatch, sample_source):
    """
    Make index.json require generation of report.spdx but don't include it
    among copyright files.
    """
    without_report = [ref for ref in index_obj['copyright']
                      if ref['file'] != 'report.spdx']
    monkeypatch.setitem(index_obj, 'copyright', without_report)
    return FileReferenceError, '^report_spdx_not_in_copyright_list$'
760

    
761
@error_maker
def sample_source_error_combined_unsupported(monkeypatch, sample_source):
    """
    Define mapping and resource together but leave source schema version at 1.x
    where this is unsupported.
    """
    monkeypatch.setitem(index_obj['definitions'][0], 'type',
                        'mapping_and_resource')
    return (ValidationError,)
771

    
772
@pytest.fixture(params=error_makers)
def sample_source_make_errors(request, monkeypatch, sample_source):
    """
    Prepare a directory with sample Haketilo source package in multiple slightly
    broken versions. Return an error type that should be raised when running
    test build.
    """
    # Error makers return 1-3 values: (error_type[, error_regex[, index_text]]).
    # Pad with '' so the unpacking always sees three items.
    error_type, error_regex, index_text = \
        [*request.param(monkeypatch, sample_source), '', ''][0:3]

    # Fall back to serializing the (possibly monkeypatched) index_obj when the
    # error maker did not supply broken index.json text itself.
    index_text = index_text or json.dumps(index_obj)

    (sample_source / 'index.json').write_text(index_text)

    monkeypatch.setitem(src_files, 'index.json', index_text.encode())

    return error_type, error_regex
789

    
790
@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build_error(tmpdir, sample_source, sample_source_make_errors):
    """Try building the sample source package and verify generated errors."""
    expected_error, expected_regex = sample_source_make_errors

    destination = Path(tmpdir) / 'dstdir'
    destination.mkdir(exist_ok=True)

    with pytest.raises(expected_error, match=expected_regex):
        builder = build.Build(sample_source, Path('index.json'))
        builder.write_package_files(destination)
(4-4/5)