Revision fbb39772
Added by koszko over 1 year ago

use pyproject.toml for pytest configuration and move all tests to tests/ directory
.gitmodules:

```diff
@@ -8,5 +8,5 @@
 path = src/hydrilla/schemas
 url = ../hydrilla-json-schemas
 [submodule "src/test/source-package-example"]
-path = src/test/source-package-example
+path = tests/source-package-example
 url = ../hydrilla-source-package-example
```
MANIFEST.in:

```diff
@@ -6,7 +6,7 @@
 
 include src/hydrilla/schemas/*.schema.json*
 include src/hydrilla/builder/locales/*/LC_MESSAGES/hydrilla-messages.po
-include src/test/source-package-example/*
-include src/test/source-package-example/LICENSES/*
-include src/test/source-package-example/.reuse/*
+include tests/source-package-example/*
+include tests/source-package-example/LICENSES/*
+include tests/source-package-example/.reuse/*
 global-exclude .git .gitignore .gitmodules
```
pyproject.toml:

```diff
@@ -10,3 +10,10 @@
 
 [tool.setuptools_scm]
 write_to = "src/hydrilla/builder/_version.py"
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "-ra -q"
+testpaths = [
+    "tests"
+]
```
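With this `[tool.pytest.ini_options]` table, pytest (6.0 or newer, as required by `minversion`) reads its configuration from pyproject.toml, so the suite can presumably be run from the repository root with a bare `pytest` invocation, with collection limited to the new tests/ directory by `testpaths`. A minimal sketch of the equivalent programmatic invocation, assuming the hydrilla.builder package is importable (e.g. after `pip install -e .`):

```python
# Minimal sketch: run the suite programmatically.  pytest.main() picks up
# [tool.pytest.ini_options] from pyproject.toml, so minversion, addopts
# ("-ra -q") and testpaths (["tests"]) from the hunk above all apply.
import sys

import pytest

if __name__ == '__main__':
    # No arguments needed -- collection is restricted to tests/ by testpaths.
    sys.exit(pytest.main())
```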
src/conftest.py:

```python
# SPDX-License-Identifier: CC0-1.0

# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
#
# Available under the terms of Creative Commons Zero v1.0 Universal.
```
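The conftest.py shown above contains only a REUSE license header. An otherwise empty conftest.py is commonly kept for its side effect: with pytest's default "prepend" import mode, loading a conftest.py from a directory that has no __init__.py prepends that directory to sys.path, so a conftest.py under src/ lets the tests do `from hydrilla.builder import build` without the package being installed. A more explicit, purely hypothetical variant of the same idea (a root-level conftest.py; not part of this revision) might look like:

```python
# Hypothetical illustration only -- not part of this commit.
# A root-level conftest.py that makes the src/ layout importable
# explicitly instead of relying on pytest's sys.path insertion.
import sys
from pathlib import Path

# Prepend <repo root>/src so `import hydrilla` resolves even when the
# package has not been installed into the environment.
sys.path.insert(0, str(Path(__file__).resolve().parent / 'src'))
```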
src/test/source-package-example (the submodule is now registered at tests/source-package-example; see the .gitmodules hunk above):

Subproject commit 92a4d31c659b2336e5e188877d1ce6bfad2fa310
src/test/test_hydrilla_builder.py:

Moved unchanged to tests/test_hydrilla_builder.py; the full file is reproduced once, under its new path, below.
tests/source-package-example:

Subproject commit 92a4d31c659b2336e5e188877d1ce6bfad2fa310
tests/test_hydrilla_builder.py:

```python
# SPDX-License-Identifier: CC0-1.0

# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
#
# Available under the terms of Creative Commons Zero v1.0 Universal.

# Enable using with Python 3.7.
from __future__ import annotations

import pytest
import json
import shutil

from tempfile import TemporaryDirectory
from pathlib import Path
from hashlib import sha256, sha1
from zipfile import ZipFile
from typing import Callable, Optional, Iterable

from jsonschema import ValidationError

from hydrilla import util as hydrilla_util
from hydrilla.builder import build, _version

here = Path(__file__).resolve().parent

expected_generated_by = {
    'name': 'hydrilla.builder',
    'version': _version.version
}

default_srcdir = here / 'source-package-example'

default_js_filenames = ['bye.js', 'hello.js', 'message.js']
default_dist_filenames = [*default_js_filenames, 'LICENSES/CC0-1.0.txt']
default_src_filenames = [
    *default_dist_filenames,
    'README.txt', 'README.txt.license', '.reuse/dep5', 'index.json'
]

default_sha1_hashes = {}
default_sha256_hashes = {}
default_contents = {}

for fn in default_src_filenames:
    with open(default_srcdir / fn, 'rb') as file_handle:
        default_contents[fn] = file_handle.read()

    default_sha256_hashes[fn] = sha256(default_contents[fn]).digest().hex()
    default_sha1_hashes[fn] = sha1(default_contents[fn]).digest().hex()

class CaseSettings:
    """Gather parametrized values in a class."""
    def __init__(self):
        """Init CaseSettings with default values."""
        self.srcdir = default_srcdir
        self.index_json_path = Path('index.json')
        self.report_spdx_included = True

        self.js_filenames = default_js_filenames.copy()
        self.dist_filenames = default_dist_filenames.copy()
        self.src_filenames = default_src_filenames.copy()

        self.sha1_hashes = default_sha1_hashes.copy()
        self.sha256_hashes = default_sha256_hashes.copy()
        self.contents = default_contents.copy()

        self.expected_resources = [{
            '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
            'source_name': 'hello',
            'source_copyright': [{
                'file': 'report.spdx',
                'sha256': '!!!!value to fill during test!!!!'
            }, {
                'file': 'LICENSES/CC0-1.0.txt',
                'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
            }],
            'type': 'resource',
            'identifier': 'helloapple',
            'long_name': 'Hello Apple',
            'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
            'version': [2021, 11, 10],
            'revision': 1,
            'description': 'greets an apple',
            'dependencies': [{'identifier': 'hello-message'}],
            'scripts': [{
                'file': 'hello.js',
                'sha256': self.sha256_hashes['hello.js']
            }, {
                'file': 'bye.js',
                'sha256': self.sha256_hashes['bye.js']
            }],
            'generated_by': expected_generated_by
        }, {
            '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
            'source_name': 'hello',
            'source_copyright': [{
                'file': 'report.spdx',
                'sha256': '!!!!value to fill during test!!!!'
            }, {
                'file': 'LICENSES/CC0-1.0.txt',
                'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
            }],
            'type': 'resource',
            'identifier': 'hello-message',
            'long_name': 'Hello Message',
            'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
            'version': [2021, 11, 10],
            'revision': 2,
            'description': 'define messages for saying hello and bye',
            'dependencies': [],
            'scripts': [{
                'file': 'message.js',
                'sha256': self.sha256_hashes['message.js']
            }],
            'generated_by': expected_generated_by
        }]
        self.expected_mapping = {
            '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
            'source_name': 'hello',
            'source_copyright': [{
                'file': 'report.spdx',
                'sha256': '!!!!value to fill during test!!!!'
            }, {
                'file': 'LICENSES/CC0-1.0.txt',
                'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
            }],
            'type': 'mapping',
            'identifier': 'helloapple',
            'long_name': 'Hello Apple',
            'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
            'version': [2021, 11, 10],
            'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
            'payloads': {
                'https://hydrillabugs.koszko.org/***': {
                    'identifier': 'helloapple'
                },
                'https://hachettebugs.koszko.org/***': {
                    'identifier': 'helloapple'
                }
            },
            'generated_by': expected_generated_by
        }
        self.expected_source_description = {
            '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
            'source_name': 'hello',
            'source_copyright': [{
                'file': 'report.spdx',
                'sha256': '!!!!value to fill during test!!!!'
            }, {
                'file': 'LICENSES/CC0-1.0.txt',
                'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
            }],
            'source_archives': {
                'zip': {
                    'sha256': '!!!!value to fill during test!!!!',
                }
            },
            'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
            'definitions': [{
                'type': 'resource',
                'identifier': 'helloapple',
                'long_name': 'Hello Apple',
                'version': [2021, 11, 10],
            }, {
                'type': 'resource',
                'identifier': 'hello-message',
                'long_name': 'Hello Message',
                'version': [2021, 11, 10],
            }, {
                'type': 'mapping',
                'identifier': 'helloapple',
                'long_name': 'Hello Apple',
                'version': [2021, 11, 10],
            }],
            'generated_by': expected_generated_by
        }

    def expected(self) -> list[dict]:
        """
        Convenience method to get a list of expected jsons of 2 resources,
        1 mapping and 1 source description we have.
        """
        return [
            *self.expected_resources,
            self.expected_mapping,
            self.expected_source_description
        ]

ModifyCb = Callable[[CaseSettings, dict], Optional[str]]

def prepare_modified(tmpdir: Path, modify_cb: ModifyCb) -> CaseSettings:
    """
    Use sample source package directory with an alternative, modified
    index.json.
    """
    settings = CaseSettings()

    for fn in settings.src_filenames:
        copy_path = tmpdir / 'srcdir_copy' / fn
        copy_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(settings.srcdir / fn, copy_path)

    settings.srcdir = tmpdir / 'srcdir_copy'

    with open(settings.srcdir / 'index.json', 'rt') as file_handle:
        obj = json.loads(hydrilla_util.strip_json_comments(file_handle.read()))

    contents = modify_cb(settings, obj)

    # Replace the other index.json with new one
    settings.index_json_path = tmpdir / 'replacement.json'

    if contents is None:
        contents = json.dumps(obj)

    contents = contents.encode()

    settings.contents['index.json'] = contents

    settings.sha256_hashes['index.json'] = sha256(contents).digest().hex()
    settings.sha1_hashes['index.json'] = sha1(contents).digest().hex()

    with open(settings.index_json_path, 'wb') as file_handle:
        file_handle.write(contents)

    return settings

@pytest.fixture()
def tmpdir() -> Iterable[str]:
    with TemporaryDirectory() as tmpdir:
        yield tmpdir

def prepare_default(tmpdir: Path) -> CaseSettings:
    """Use sample source package directory as exists in VCS."""
    return CaseSettings()

def modify_index_good(settings: CaseSettings, obj: dict) -> None:
    """
    Modify index.json object to make a slightly different but *also correct* one
    that can be used to test some different cases.
    """
    # Add comments that should be preserved.
    for dictionary in (obj, settings.expected_source_description):
        dictionary['comment'] = 'index_json comment'

    for i, dicts in enumerate(zip(obj['definitions'], settings.expected())):
        for dictionary in dicts:
            dictionary['comment'] = f'item {i}'

    # Remove spdx report generation
    del obj['reuse_generate_spdx_report']
    obj['copyright'].remove({'file': 'report.spdx'})

    settings.report_spdx_included = False

    for json_description in settings.expected():
        json_description['source_copyright'] = \
            [fr for fr in json_description['source_copyright']
             if fr['file'] != 'report.spdx']

    # Use default value ([]) for 'additionall_files' property
    del obj['additional_files']

    settings.src_filenames = [*settings.dist_filenames, 'index.json']

    # Use default value ([]) for 'scripts' property in one of the resources
    del obj['definitions'][1]['scripts']

    settings.expected_resources[1]['scripts'] = []

    for prefix in ('js', 'dist', 'src'):
        getattr(settings, f'{prefix}_filenames').remove('message.js')

    # Use default value ({}) for 'pyloads' property in mapping
    del obj['definitions'][2]['payloads']

    settings.expected_mapping['payloads'] = {}

    # Don't use UUIDs (they are optional)
    for definition in obj['definitions']:
        del definition['uuid']

    for description in settings.expected():
        if 'uuid' in description:
            del description['uuid']

    # Add some unrecognized properties that should be stripped
    to_process = [obj]
    while to_process:
        processed = to_process.pop()

        if type(processed) is list:
            to_process.extend(processed)
        elif type(processed) is dict and 'spurious_property' not in processed:
            to_process.extend(processed.values())
            processed['spurious_property'] = 'some value'

@pytest.mark.parametrize('prepare_source_example', [
    prepare_default,
    lambda tmpdir: prepare_modified(tmpdir, modify_index_good)
])
def test_build(tmpdir, prepare_source_example):
    """Build the sample source package and verify the produced files."""
    # First, build the package
    dstdir = Path(tmpdir) / 'dstdir'
    tmpdir = Path(tmpdir) / 'example'

    dstdir.mkdir(exist_ok=True)
    tmpdir.mkdir(exist_ok=True)

    settings = prepare_source_example(tmpdir)

    build.Build(settings.srcdir, settings.index_json_path)\
        .write_package_files(dstdir)

    # Verify directories under destination directory
    assert {'file', 'resource', 'mapping', 'source'} == \
        set([path.name for path in dstdir.iterdir()])

    # Verify files under 'file/'
    file_dir = dstdir / 'file' / 'sha256'

    for fn in settings.dist_filenames:
        dist_file_path = file_dir / settings.sha256_hashes[fn]
        assert dist_file_path.is_file()

        assert dist_file_path.read_bytes() == settings.contents[fn]

    sha256_hashes_set = set([settings.sha256_hashes[fn]
                             for fn in settings.dist_filenames])

    spdx_report_sha256 = None

    for path in file_dir.iterdir():
        if path.name in sha256_hashes_set:
            continue

        assert spdx_report_sha256 is None and settings.report_spdx_included

        with open(path, 'rt') as file_handle:
            spdx_contents = file_handle.read()

        spdx_report_sha256 = sha256(spdx_contents.encode()).digest().hex()
        assert spdx_report_sha256 == path.name

    for fn in settings.src_filenames:
        if not any([n in fn.lower() for n in ('license', 'reuse')]):
            assert settings.sha1_hashes[fn]

    if settings.report_spdx_included:
        assert spdx_report_sha256
        for obj in settings.expected():
            for file_ref in obj['source_copyright']:
                if file_ref['file'] == 'report.spdx':
                    file_ref['sha256'] = spdx_report_sha256

    # Verify files under 'resource/'
    resource_dir = dstdir / 'resource'

    assert set([rj['identifier'] for rj in settings.expected_resources]) == \
        set([path.name for path in resource_dir.iterdir()])

    for resource_json in settings.expected_resources:
        subdir = resource_dir / resource_json['identifier']
        assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]

        with open(subdir / '2021.11.10', 'rt') as file_handle:
            assert json.load(file_handle) == resource_json

        hydrilla_util.validator_for('api_resource_description-1.0.1.schema.json')\
            .validate(resource_json)

    # Verify files under 'mapping/'
    mapping_dir = dstdir / 'mapping'
    assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]

    subdir = mapping_dir / 'helloapple'
    assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]

    with open(subdir / '2021.11.10', 'rt') as file_handle:
        assert json.load(file_handle) == settings.expected_mapping

    hydrilla_util.validator_for('api_mapping_description-1.0.1.schema.json')\
        .validate(settings.expected_mapping)

    # Verify files under 'source/'
    source_dir = dstdir / 'source'
    assert {'hello.json', 'hello.zip'} == \
        set([path.name for path in source_dir.iterdir()])

    zip_filenames = [f'hello/{fn}' for fn in settings.src_filenames]

    with ZipFile(source_dir / 'hello.zip', 'r') as archive:
        assert set([f.filename for f in archive.filelist]) == set(zip_filenames)

        for zip_fn, src_fn in zip(zip_filenames, settings.src_filenames):
            with archive.open(zip_fn, 'r') as zip_file_handle:
                assert zip_file_handle.read() == settings.contents[src_fn]

    zip_ref = settings.expected_source_description['source_archives']['zip']
    with open(source_dir / 'hello.zip', 'rb') as file_handle:
        zip_ref['sha256'] = sha256(file_handle.read()).digest().hex()

    with open(source_dir / 'hello.json', 'rt') as file_handle:
        assert json.load(file_handle) == settings.expected_source_description

    hydrilla_util.validator_for('api_source_description-1.0.1.schema.json')\
        .validate(settings.expected_source_description)

def modify_index_missing_file(dummy: CaseSettings, obj: dict) -> None:
    """
    Modify index.json to expect missing report.spdx file and cause an error.
    """
    del obj['reuse_generate_spdx_report']

def modify_index_schema_error(dummy: CaseSettings, obj: dict) -> None:
    """Modify index.json to be incompliant with the schema."""
    del obj['definitions']

def modify_index_bad_comment(dummy: CaseSettings, obj: dict) -> str:
    """Modify index.json to have an invalid '/' in it."""
    return json.dumps(obj) + '/something\n'

def modify_index_bad_json(dummy: CaseSettings, obj: dict) -> str:
    """Modify index.json to not be valid json even after comment stripping."""
    return json.dumps(obj) + '???/\n'

def modify_index_missing_license(settings: CaseSettings, obj: dict) -> None:
    """Remove a file to make package REUSE-incompliant."""
    (settings.srcdir / 'README.txt.license').unlink()

def modify_index_file_outside(dummy: CaseSettings, obj: dict) -> None:
    """Make index.json illegally reference a file outside srcdir."""
    obj['copyright'].append({'file': '../abc'})

def modify_index_reference_itself(dummy: CaseSettings, obj: dict) -> None:
    """Make index.json illegally reference index.json."""
    obj['copyright'].append({'file': 'index.json'})

def modify_index_report_excluded(dummy: CaseSettings, obj: dict) -> None:
    """
    Make index.json require generation of index.json but not include it among
    copyright files.
    """
    obj['copyright'] = [fr for fr in obj['copyright']
                        if fr['file'] != 'report.spdx']

@pytest.mark.parametrize('break_index_json', [
    (modify_index_missing_file, FileNotFoundError),
    (modify_index_schema_error, ValidationError),
    (modify_index_bad_comment, json.JSONDecodeError),
    (modify_index_bad_json, json.JSONDecodeError),
    (modify_index_missing_license, build.ReuseError),
    (modify_index_file_outside, build.FileReferenceError),
    (modify_index_reference_itself, build.FileReferenceError),
    (modify_index_report_excluded, build.FileReferenceError)
])
def test_build_error(tmpdir: str, break_index_json: tuple[ModifyCb, type]):
    """Build the sample source package and verify the produced files."""
    dstdir = Path(tmpdir) / 'dstdir'
    tmpdir = Path(tmpdir) / 'example'

    dstdir.mkdir(exist_ok=True)
    tmpdir.mkdir(exist_ok=True)

    modify_cb, error_type = break_index_json

    settings = prepare_modified(tmpdir, modify_cb)

    with pytest.raises(error_type):
        build.Build(settings.srcdir, settings.index_json_path)\
            .write_package_files(dstdir)
```