Revision 22c722bf
Added by koszko about 1 year ago

New upstream version 1.1~beta1

- ID: 22c722bf59e59246f47491c7229b17f9ef783614
- Parent: 6bc04f80
```diff
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -4,7 +4,7 @@
 #
 # Available under the terms of Creative Commons Zero v1.0 Universal.
 
-include src/hydrilla/schemas/*.schema.json*
+include src/hydrilla/schemas/*/*.schema.json*
 include src/hydrilla/builder/locales/*/LC_MESSAGES/hydrilla-messages.po
 include tests/source-package-example/*
 include tests/source-package-example/LICENSES/*
```
````diff
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hydrilla.builder
-Version: 1.0
+Version: 1.1b1
 Summary: Hydrilla package builder
 Home-page: https://git.koszko.org/hydrilla-builder
 Author: Wojtek Kosior
@@ -24,19 +24,19 @@
 
 ### Build
 
+* build (a PEP517 package builder)
 * setuptools
 * wheel
 * setuptools_scm
-* babel
+* babel (Python library)
 
 ### Test
 
 * pytest
-* reuse
 
-## Building & testing
+## Building & testing & installation from wheel
 
-Build and test processed are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
+Build, test and installation processes are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
 
 ## Running
 
@@ -50,6 +50,8 @@
 ```
 
 You might as well like to run from sources, without installation:
+
+``` shell
 mkdir /tmp/bananowarzez/
 ./setup.py compile_catalog # generate the necessary .po files
 PYTHONPATH=src python3 -m hydrilla.builder -s src/test/source-package-example/ \
@@ -86,4 +88,5 @@
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
 Provides-Extra: setup
+Provides-Extra: spdx
 Provides-Extra: test
````
````diff
--- a/README.md
+++ b/README.md
@@ -15,19 +15,19 @@
 
 ### Build
 
+* build (a PEP517 package builder)
 * setuptools
 * wheel
 * setuptools_scm
-* babel
+* babel (Python library)
 
 ### Test
 
 * pytest
-* reuse
 
-## Building & testing
+## Building & testing & installation from wheel
 
-Build and test processed are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
+Build, test and installation processes are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
 
 ## Running
 
@@ -41,6 +41,8 @@
 ```
 
 You might as well like to run from sources, without installation:
+
+``` shell
 mkdir /tmp/bananowarzez/
 ./setup.py compile_catalog # generate the necessary .po files
 PYTHONPATH=src python3 -m hydrilla.builder -s src/test/source-package-example/ \
````
```diff
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,82 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import sys
+from pathlib import Path
+
+import pytest
+import pkgutil
+import importlib
+from tempfile import TemporaryDirectory
+from typing import Iterable
+
+here = Path(__file__).resolve().parent
+sys.path.insert(0, str(here / 'src'))
+
+@pytest.fixture(autouse=True)
+def no_requests(monkeypatch):
+    """Remove requests.sessions.Session.request for all tests."""
+    if importlib.util.find_spec("requests") is not None:
+        monkeypatch.delattr('requests.sessions.Session.request')
+
+@pytest.fixture
+def mock_subprocess_run(monkeypatch, request):
+    """
+    Temporarily replace subprocess.run() with a function supplied through pytest
+    marker 'subprocess_run'.
+
+    The marker expects 2 arguments:
+    * the module inside which the subprocess attribute should be mocked and
+    * a run() function to use.
+    """
+    where, mocked_run = request.node.get_closest_marker('subprocess_run').args
+
+    class MockedSubprocess:
+        """Minimal mocked version of the subprocess module."""
+        run = mocked_run
+
+    monkeypatch.setattr(where, 'subprocess', MockedSubprocess)
+
+@pytest.fixture(autouse=True)
+def no_gettext(monkeypatch, request):
+    """
+    Make gettext return all strings untranslated unless we request otherwise.
+    """
+    if request.node.get_closest_marker('enable_gettext'):
+        return
+
+    import hydrilla
+    modules_to_process = [hydrilla]
+
+    def add_child_modules(parent):
+        """
+        Recursively collect all modules descending from 'parent' into an array.
+        """
+        try:
+            load_paths = parent.__path__
+        except AttributeError:
+            return
+
+        for module_info in pkgutil.iter_modules(load_paths):
+            if module_info.name != '__main__':
+                __import__(f'{parent.__name__}.{module_info.name}')
+                modules_to_process.append(getattr(parent, module_info.name))
+                add_child_modules(getattr(parent, module_info.name))
+
+    add_child_modules(hydrilla)
+
+    for module in modules_to_process:
+        if hasattr(module, '_'):
+            monkeypatch.setattr(module, '_', lambda message: message)
+
+@pytest.fixture
+def tmpdir() -> Iterable[Path]:
+    """
+    Provide test case with a temporary directory that will be automatically
+    deleted after the test.
+    """
+    with TemporaryDirectory() as tmpdir:
+        yield Path(tmpdir)
```
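An editor-added sketch of how a test could combine the 'subprocess_run' marker (registered in pyproject.toml below) with the mock_subprocess_run fixture above; the mocked target module and the fake run() function are illustrative assumptions, not code from this commit:

```python
import subprocess

import pytest

from hydrilla.builder import local_apt  # module whose 'subprocess' gets mocked

def fake_run(command, **kwargs):
    # Pretend every invoked command exits successfully with empty output.
    return subprocess.CompletedProcess(args=command, returncode=0,
                                       stdout='', stderr='')

@pytest.mark.subprocess_run(local_apt, fake_run)
def test_update_succeeds(mock_subprocess_run):
    # For the duration of this test, local_apt.subprocess.run is fake_run.
    ...
```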
|
```diff
--- a/doc/man/man1/hydrilla-builder.1
+++ b/doc/man/man1/hydrilla-builder.1
@@ -6,10 +6,10 @@
 .\"
 .\" Available under the terms of Creative Commons Zero v1.0 Universal.
 
-.TH HYDRILLA-BUILDER 1 2022-04-22 "Hydrilla 1.0" "Hydrilla Manual"
+.TH HYDRILLA-BUILDER 1 2022-06-14 "Hydrilla 1.1" "Hydrilla Manual"
 
 .SH NAME
-hydrilla-builder \- Generate packages to be served by Hydrilla
+hydrilla\-builder \- Generate packages to be served by Hydrilla
 
 .SH SYNOPSIS
 .B "hydrilla\-builder \-\-help"
@@ -21,19 +21,24 @@
 names.)
 
 .SH DESCRIPTION
-.I hydrilla-builder
+.I hydrilla\-builder
 is a tool which takes a Hydrilla source package and generates files of a
 built package, suitable for serving by the Hydrilla server.
 
-As of Hydrilla version 1.0
-.I hydrilla-builder
-does not yet perform nor trigger actions like compilation, minification or
-bundling of source code files. Its main function is to automate the process
-of computing SHA256 cryptographic sums of package files and including them
-in JSON definitions.
+The main function of
+.I hydrilla\-builder
+is to automate the process of computing SHA256 cryptographic sums of package
+files and including them in JSON definitions.
+
+This tool does not perform nor trigger actions like compilation, minification or
+bundling of source code files. When this is needed,
+.I hydrilla\-builder
+instead relies on facilities already provided by other software distribution
+systems like APT and extracts the requested files from .deb packages. This
+feature is called \*(lqpiggybacking\*(rq.
 
 In addition,
-.B hydrilla\-builder
+.I hydrilla\-builder
 can generate an SPDX report from source package if the
 \*(lqreuse_generate_spdx_report\*(rq property is set to true in index.json.
 
@@ -64,18 +69,26 @@
 \*(lqindex.json\*(rq, substituting any file with such name that could be
 present in the source directory.
 
+.TP
+.BI \-p " PIGGYBACK_PATH" "\fR,\fP \-\^\-piggyback\-files=" PIGGYBACK_PATH
+Read and write foreign package archives under
+.IR PIGGYBACK_PATH .
+If not specified, a default value is computed by appending
+\*(lq.foreign-packages\*(rq to the
+.I SOURCE
+directory path.
+
 .TP
 .BI \-d " DESTINATION" "\fR,\fP \-\^\-dstdir=" DESTINATION
 Write generated files under
 .IR DESTINATION .
-Files are written in such way that
 .I DESTINATION
-is valid for being passed to Hydrilla to serve packages from.
+can then be passed to Hydrilla to serve packages from.
 
 .TP
 .B \-\^\-version
 Show version information for this instance of
-.I hydrilla-builder
+.I hydrilla\-builder
 on the standard output and exit successfully.
 
 .SH "EXIT STATUS"
```
```diff
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,10 @@
 
 [tool.pytest.ini_options]
 minversion = "6.0"
-addopts = "-ra -q"
+addopts = "-ra"
 testpaths = [
     "tests"
 ]
+markers = [
+    "subprocess_run: define how mocked subprocess.run should behave"
+]
```
```diff
--- a/setup.cfg
+++ b/setup.cfg
@@ -24,7 +24,7 @@
 zip_safe = False
 package_dir =
     = src
-packages = find:
+packages = find_namespace:
 include_package_data = True
 python_requires = >= 3.7
 install_requires =
@@ -36,7 +36,8 @@
 
 [options.extras_require]
 test = pytest
-setup = setuptools_scm
+setup = setuptools_scm; babel
+spdx = reuse
 
 [options.packages.find]
 where = src
```
```diff
--- a/setup.py
+++ b/setup.py
@@ -8,13 +8,42 @@
 import setuptools
 
 from setuptools.command.build_py import build_py
+from setuptools.command.sdist import sdist
+
+from pathlib import Path
+
+here = Path(__file__).resolve().parent
 
 class CustomBuildCommand(build_py):
-    '''
-    The build command but runs babel before build.
-    '''
+    """The build command but runs babel before build."""
     def run(self, *args, **kwargs):
+        """Wrapper around build_py's original run() method."""
         self.run_command('compile_catalog')
+
+        super().run(*args, **kwargs)
+
+class CustomSdistCommand(sdist):
+    """
+    The sdist command but prevents compiled message catalogs from being included
+    in the archive.
+    """
+    def run(self, *args, **kwargs):
+        """Wrapper around sdist's original run() method."""
+        locales_dir = here / 'src/hydrilla/builder/locales'
+        locale_files = {}
+
+        for path in locales_dir.rglob('*.mo'):
+            locale_files[path] = path.read_bytes()
+
+        for path in locale_files:
+            path.unlink()
+
         super().run(*args, **kwargs)
 
-setuptools.setup(cmdclass={'build_py': CustomBuildCommand})
+        for path, contents in locale_files.items():
+            path.write_bytes(contents)
+
+setuptools.setup(cmdclass = {
+    'build_py': CustomBuildCommand,
+    'sdist': CustomSdistCommand
+})
```
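The overridden sdist command above follows a save/delete/restore pattern around super().run(). An editor-added sketch, not part of the commit, of the same idea packaged as a reusable context manager:

```python
from contextlib import contextmanager
from pathlib import Path

@contextmanager
def files_removed(paths):
    """Temporarily remove the given files, restoring their contents on exit."""
    saved = {Path(p): Path(p).read_bytes() for p in paths}
    for path in saved:
        path.unlink()
    try:
        yield
    finally:
        for path, contents in saved.items():
            path.write_bytes(contents)

# Hypothetical usage mirroring CustomSdistCommand.run():
# with files_removed(locales_dir.rglob('*.mo')):
#     super().run(*args, **kwargs)
```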
|
src/hydrilla.builder.egg-info/PKG-INFO — this generated copy receives exactly the same changes as the PKG-INFO diff shown above (version bump to 1.1b1, dependency-list updates, the added shell code fence, and the new "Provides-Extra: spdx" line).
```diff
--- a/src/hydrilla.builder.egg-info/SOURCES.txt
+++ b/src/hydrilla.builder.egg-info/SOURCES.txt
@@ -1,6 +1,7 @@
 MANIFEST.in
 README.md
 README.md.license
+conftest.py
 pyproject.toml
 setup.cfg
 setup.py
@@ -21,22 +22,40 @@
 src/hydrilla/builder/__main__.py
 src/hydrilla/builder/_version.py
 src/hydrilla/builder/build.py
+src/hydrilla/builder/common_errors.py
+src/hydrilla/builder/local_apt.py
+src/hydrilla/builder/piggybacking.py
 src/hydrilla/builder/locales/en_US/LC_MESSAGES/hydrilla-messages.po
-src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json
-src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json.license
-src/hydrilla/schemas/api_query_result-1.0.1.schema.json
-src/hydrilla/schemas/api_query_result-1.0.1.schema.json.license
-src/hydrilla/schemas/api_resource_description-1.0.1.schema.json
-src/hydrilla/schemas/api_resource_description-1.0.1.schema.json.license
-src/hydrilla/schemas/api_source_description-1.0.1.schema.json
-src/hydrilla/schemas/api_source_description-1.0.1.schema.json.license
-src/hydrilla/schemas/common_definitions-1.0.1.schema.json
-src/hydrilla/schemas/common_definitions-1.0.1.schema.json.license
-src/hydrilla/schemas/package_source-1.0.1.schema.json
-src/hydrilla/schemas/package_source-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json
+src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json
+src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json.license
+src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json
+src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json.license
+src/hydrilla/schemas/2.x/api_query_result-2.schema.json
+src/hydrilla/schemas/2.x/api_query_result-2.schema.json.license
+src/hydrilla/schemas/2.x/api_resource_description-2.schema.json
+src/hydrilla/schemas/2.x/api_resource_description-2.schema.json.license
+src/hydrilla/schemas/2.x/api_source_description-2.schema.json
+src/hydrilla/schemas/2.x/api_source_description-2.schema.json.license
+src/hydrilla/schemas/2.x/common_definitions-2.schema.json
+src/hydrilla/schemas/2.x/common_definitions-2.schema.json.license
+src/hydrilla/schemas/2.x/package_source-2.schema.json
+src/hydrilla/schemas/2.x/package_source-2.schema.json.license
 src/hydrilla/util/__init__.py
 src/hydrilla/util/_util.py
-tests/test_hydrilla_builder.py
+tests/__init__.py
+tests/helpers.py
+tests/test_build.py
+tests/test_local_apt.py
 tests/source-package-example/README.txt
 tests/source-package-example/README.txt.license
 tests/source-package-example/bye.js
```
```diff
--- a/src/hydrilla.builder.egg-info/requires.txt
+++ b/src/hydrilla.builder.egg-info/requires.txt
@@ -2,7 +2,11 @@
 jsonschema>=3.0
 
 [setup]
+babel
 setuptools_scm
 
+[spdx]
+reuse
+
 [test]
 pytest
```
```diff
--- a/src/hydrilla/builder/_version.py
+++ b/src/hydrilla/builder/_version.py
@@ -1,5 +1,5 @@
 # coding: utf-8
 # file generated by setuptools_scm
 # don't change, don't track in version control
-version = '1.0'
-version_tuple = (1, 0)
+version = '1.1b1'
+version_tuple = (1, '1b1')
```
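'1.1b1' is the PEP 440 normalization of the upstream version string '1.1~beta1' from the commit message. A quick editor-added check using the third-party packaging library, assuming it is available:

```python
from packaging.version import Version

v = Version('1.1b1')
assert v.is_prerelease and v.pre == ('b', 1)   # 'b1' marks the first beta
assert Version('1.1b1') < Version('1.1')       # pre-releases sort before finals
```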
|
```diff
--- a/src/hydrilla/builder/build.py
+++ b/src/hydrilla/builder/build.py
@@ -30,22 +30,27 @@
 import json
 import re
 import zipfile
-from pathlib import Path
+import subprocess
+from pathlib import Path, PurePosixPath
 from hashlib import sha256
 from sys import stderr
+from contextlib import contextmanager
+from tempfile import TemporaryDirectory, TemporaryFile
+from typing import Optional, Iterable, Union
 
 import jsonschema
 import click
 
 from .. import util
 from . import _version
+from . import local_apt
+from .piggybacking import Piggybacked
+from .common_errors import *
 
 here = Path(__file__).resolve().parent
 
 _ = util.translation(here / 'locales').gettext
 
-index_validator = util.validator_for('package_source-1.0.1.schema.json')
-
 schemas_root = 'https://hydrilla.koszko.org/schemas'
 
 generated_by = {
@@ -53,78 +58,34 @@
     'version': _version.version
 }
 
-class FileReferenceError(Exception):
-    """
-    Exception used to report various problems concerning files referenced from
-    source package's index.json.
-    """
-
-class ReuseError(Exception):
+class ReuseError(SubprocessError):
     """
     Exception used to report various problems when calling the REUSE tool.
     """
 
-class FileBuffer:
-    """
-    Implement a file-like object that buffers data written to it.
-    """
-    def __init__(self):
-        """
-        Initialize FileBuffer.
-        """
-        self.chunks = []
-
-    def write(self, b):
-        """
-        Buffer 'b', return number of bytes buffered.
-
-        'b' is expected to be an instance of 'bytes' or 'str', in which case it
-        gets encoded as UTF-8.
-        """
-        if type(b) is str:
-            b = b.encode()
-        self.chunks.append(b)
-        return len(b)
-
-    def flush(self):
-        """
-        A no-op mock of file-like object's flush() method.
-        """
-        pass
-
-    def get_bytes(self):
-        """
-        Return all data written so far concatenated into a single 'bytes'
-        object.
-        """
-        return b''.join(self.chunks)
-
-def generate_spdx_report(root):
+def generate_spdx_report(root: Path) -> bytes:
     """
     Use REUSE tool to generate an SPDX report for sources under 'root' and
     return the report's contents as 'bytes'.
 
-    'root' shall be an instance of pathlib.Path.
-
     In case the directory tree under 'root' does not constitute a
-    REUSE-compliant package, linting report is printed to standard output and
-    an exception is raised.
+    REUSE-compliant package, an exception is raised with linting report
+    included in it.
 
-    In case the reuse package is not installed, an exception is also raised.
+    In case the reuse tool is not installed, an exception is also raised.
     """
-    try:
-        from reuse._main import main as reuse_main
-    except ModuleNotFoundError:
-        raise ReuseError(_('couldnt_import_reuse_is_it_installed'))
-
-    mocked_output = FileBuffer()
-    if reuse_main(args=['--root', str(root), 'lint'], out=mocked_output) != 0:
-        stderr.write(mocked_output.get_bytes().decode())
-        raise ReuseError(_('spdx_report_from_reuse_incompliant'))
-
-    mocked_output = FileBuffer()
-    if reuse_main(args=['--root', str(root), 'spdx'], out=mocked_output) != 0:
-        stderr.write(mocked_output.get_bytes().decode())
-        raise ReuseError("Couldn't generate an SPDX report for package.")
-
-    return mocked_output.get_bytes()
+    for command in [
+            ['reuse', '--root', str(root), 'lint'],
+            ['reuse', '--root', str(root), 'spdx']
+    ]:
+        try:
+            cp = subprocess.run(command, capture_output=True, text=True)
+        except FileNotFoundError:
+            msg = _('couldnt_execute_{}_is_it_installed').format('reuse')
+            raise ReuseError(msg)
+
+        if cp.returncode != 0:
+            msg = _('command_{}_failed').format(' '.join(command))
+            raise ReuseError(msg, cp)
+
+    return cp.stdout.encode()
```
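generate_spdx_report() now shells out to the reuse CLI instead of importing reuse._main. An editor-added usage sketch; the source directory path here is made up:

```python
from pathlib import Path

from hydrilla.builder.build import generate_spdx_report, ReuseError

try:
    report = generate_spdx_report(Path('my-source-package'))  # hypothetical dir
    Path('report.spdx').write_bytes(report)
except ReuseError as err:
    # The message may embed the failed 'reuse' call's stdout/stderr
    # (see SubprocessError in common_errors.py further down).
    print(f'could not generate SPDX report: {err}')
```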
|
src/hydrilla/builder/build.py (continued):

```diff
@@ -131,22 +92,41 @@
 
 class FileRef:
     """Represent reference to a file in the package."""
-    def __init__(self, path: Path, contents: bytes):
+    def __init__(self, path: PurePosixPath, contents: bytes) -> None:
         """Initialize FileRef."""
-        self.include_in_distribution = False
-        self.include_in_zipfile = True
-        self.path = path
-        self.contents = contents
+        self.include_in_distribution   = False
+        self.include_in_source_archive = True
+        self.path                      = path
+        self.contents                  = contents
 
         self.contents_hash = sha256(contents).digest().hex()
 
-    def make_ref_dict(self, filename: str):
+    def make_ref_dict(self) -> dict[str, str]:
         """
         Represent the file reference through a dict that can be included in JSON
         definitions.
         """
         return {
-            'file': filename,
+            'file': str(self.path),
             'sha256': self.contents_hash
         }
 
+@contextmanager
+def piggybacked_system(piggyback_def: Optional[dict],
+                       piggyback_files: Optional[Path]) \
+        -> Iterable[Piggybacked]:
+    """
+    Resolve resources from a foreign software packaging system. Optionally, use
+    package files (.deb's, etc.) from a specified directory instead of resolving
+    and downloading them.
+    """
+    if piggyback_def is None:
+        yield Piggybacked()
+    else:
+        # apt is the only supported system right now
+        assert piggyback_def['system'] == 'apt'
+
+        with local_apt.piggybacked_system(piggyback_def, piggyback_files) \
+             as piggybacked:
+            yield piggybacked
```
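An editor-added illustration of what FileRef.make_ref_dict() produces; the path and file contents are made up:

```python
from hashlib import sha256
from pathlib import PurePosixPath

path = PurePosixPath('js/hello.js')        # hypothetical package-relative path
contents = b'console.log("hello");\n'

ref_dict = {'file': str(path), 'sha256': sha256(contents).digest().hex()}
# -> {'file': 'js/hello.js', 'sha256': '<64 hexadecimal digits>'}
```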
|
|
src/hydrilla/builder/build.py (continued):

```diff
@@ -153,73 +133,98 @@
 class Build:
     """
     Build a Hydrilla package.
     """
-    def __init__(self, srcdir, index_json_path):
+    def __init__(self, srcdir: Path, index_json_path: Path,
+                 piggyback_files: Optional[Path]=None):
         """
         Initialize a build. All files to be included in a distribution package
         are loaded into memory, all data gets validated and all necessary
         computations (e.g. preparing of hashes) are performed.
-
-        'srcdir' and 'index_json' are expected to be pathlib.Path objects.
         """
         self.srcdir = srcdir.resolve()
-        self.index_json_path = index_json_path
+        self.piggyback_files = piggyback_files
+        if piggyback_files is None:
+            piggyback_default_path = \
+                srcdir.parent / f'{srcdir.name}.foreign-packages'
+            if piggyback_default_path.exists():
+                self.piggyback_files = piggyback_default_path
         self.files_by_path = {}
         self.resource_list = []
         self.mapping_list = []
 
         if not index_json_path.is_absolute():
-            self.index_json_path = (self.srcdir / self.index_json_path)
+            index_json_path = (self.srcdir / index_json_path)
 
-        self.index_json_path = self.index_json_path.resolve()
+        index_obj, major = util.load_instance_from_file(index_json_path)
 
-        with open(self.index_json_path, 'rt') as index_file:
-            index_json_text = index_file.read()
+        if major not in (1, 2):
+            msg = _('unknown_schema_package_source_{}')\
+                .format(index_json_path)
+            raise util.UnknownSchemaError(msg)
 
-        index_obj = json.loads(util.strip_json_comments(index_json_text))
+        index_desired_path = PurePosixPath('index.json')
+        self.files_by_path[index_desired_path] = \
+            FileRef(index_desired_path, index_json_path.read_bytes())
 
-        self.files_by_path[self.srcdir / 'index.json'] = \
-            FileRef(self.srcdir / 'index.json', index_json_text.encode())
+        self._process_index_json(index_obj, major)
 
-        self._process_index_json(index_obj)
-
-    def _process_file(self, filename: str, include_in_distribution: bool=True):
+    def _process_file(self, filename: Union[str, PurePosixPath],
+                      piggybacked: Piggybacked,
+                      include_in_distribution: bool=True):
         """
         Resolve 'filename' relative to srcdir, load it to memory (if not loaded
         before), compute its hash and store its information in
         'self.files_by_path'.
 
-        'filename' shall represent a relative path using '/' as a separator.
+        'filename' shall represent a relative path within package directory.
 
         if 'include_in_distribution' is True it shall cause the file to not only
         be included in the source package's zipfile, but also written as one of
         built package's files.
 
+        For each file an attempt is made to resolve it using 'piggybacked'
+        object. If a file is found and pulled from foreign software packaging
+        system this way, it gets automatically excluded from inclusion in
+        Hydrilla source package's zipfile.
+
         Return file's reference object that can be included in JSON definitions
         of various kinds.
         """
-        path = self.srcdir
-        for segment in filename.split('/'):
-            path /= segment
-
-        path = path.resolve()
-        if not path.is_relative_to(self.srcdir):
-            raise FileReferenceError(_('loading_{}_outside_package_dir')
-                                     .format(filename))
-
-        if str(path.relative_to(self.srcdir)) == 'index.json':
-            raise FileReferenceError(_('loading_reserved_index_json'))
+        include_in_source_archive = True
+
+        desired_path = PurePosixPath(filename)
+        if '..' in desired_path.parts:
+            msg = _('path_contains_double_dot_{}').format(filename)
+            raise FileReferenceError(msg)
+
+        path = piggybacked.resolve_file(desired_path)
+        if path is None:
+            path = (self.srcdir / desired_path).resolve()
+            try:
+                rel_path = path.relative_to(self.srcdir)
+            except ValueError:
+                raise FileReferenceError(_('loading_{}_outside_package_dir')
+                                         .format(filename))
+
+            if str(rel_path) == 'index.json':
+                raise FileReferenceError(_('loading_reserved_index_json'))
+        else:
+            include_in_source_archive = False
 
-        file_ref = self.files_by_path.get(path)
+        file_ref = self.files_by_path.get(desired_path)
         if file_ref is None:
-            with open(path, 'rb') as file_handle:
-                contents = file_handle.read()
+            if not path.is_file():
+                msg = _('referenced_file_{}_missing').format(desired_path)
+                raise FileReferenceError(msg)
 
-            file_ref = FileRef(path, contents)
-            self.files_by_path[path] = file_ref
+            file_ref = FileRef(desired_path, path.read_bytes())
+            self.files_by_path[desired_path] = file_ref
 
         if include_in_distribution:
             file_ref.include_in_distribution = True
 
-        return file_ref.make_ref_dict(filename)
+        if not include_in_source_archive:
+            file_ref.include_in_source_archive = False
+
+        return file_ref.make_ref_dict()
 
```
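The '..' check above rejects path traversal before any filesystem access happens, while piggybacked files skip the srcdir containment check instead. A small editor-added demonstration of that predicate:

```python
from pathlib import PurePosixPath

assert '..' in PurePosixPath('../../etc/passwd').parts      # would raise
assert '..' not in PurePosixPath('licenses/COPYING').parts  # accepted
```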
|
src/hydrilla/builder/build.py (continued):

```diff
@@ -226,25 +231,29 @@
-    def _prepare_source_package_zip(self, root_dir_name: str):
+    def _prepare_source_package_zip(self, source_name: str,
+                                    piggybacked: Piggybacked) -> str:
         """
         Create and store in memory a .zip archive containing files needed to
         build this source package.
 
-        'root_dir_name' shall not contain any slashes ('/').
+        'source_name' shall not contain any slashes ('/').
 
         Return zipfile's sha256 sum's hexstring.
         """
-        fb = FileBuffer()
-        root_dir_path = Path(root_dir_name)
+        tf = TemporaryFile()
+        source_dir_path = PurePosixPath(source_name)
+        piggybacked_dir_path = PurePosixPath(f'{source_name}.foreign-packages')
 
-        def zippath(file_path):
-            file_path = root_dir_path / file_path.relative_to(self.srcdir)
-            return file_path.as_posix()
-
-        with zipfile.ZipFile(fb, 'w') as xpi:
+        with zipfile.ZipFile(tf, 'w') as zf:
             for file_ref in self.files_by_path.values():
-                if file_ref.include_in_zipfile:
-                    xpi.writestr(zippath(file_ref.path), file_ref.contents)
+                if file_ref.include_in_source_archive:
+                    zf.writestr(str(source_dir_path / file_ref.path),
+                                file_ref.contents)
+
+            for desired_path, real_path in piggybacked.archive_files():
+                zf.writestr(str(piggybacked_dir_path / desired_path),
+                            real_path.read_bytes())
 
-        self.source_zip_contents = fb.get_bytes()
+        tf.seek(0)
+        self.source_zip_contents = tf.read()
 
         return sha256(self.source_zip_contents).digest().hex()
 
```
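A side note on the hashing idiom used above: sha256(...).digest().hex() is equivalent to calling hexdigest() directly.

```python
from hashlib import sha256

data = b'example'
assert sha256(data).digest().hex() == sha256(data).hexdigest()
```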
|
src/hydrilla/builder/build.py (continued):

```diff
@@ -251,29 +260,31 @@
-    def _process_item(self, item_def: dict):
+    def _process_item(self, as_what: str, item_def: dict,
+                      piggybacked: Piggybacked):
         """
-        Process 'item_def' as definition of a resource/mapping and store in
-        memory its processed form and files used by it.
+        Process 'item_def' as definition of a resource or mapping (determined by
+        'as_what' param) and store in memory its processed form and files used
+        by it.
 
         Return a minimal item reference suitable for using in source
         description.
         """
-        copy_props = ['type', 'identifier', 'long_name', 'description']
-        for prop in ('comment', 'uuid'):
-            if prop in item_def:
-                copy_props.append(prop)
+        resulting_schema_version = [1]
+
+        copy_props = ['identifier', 'long_name', 'description',
+                      *filter(lambda p: p in item_def, ('comment', 'uuid'))]
 
-        if item_def['type'] == 'resource':
+        if as_what == 'resource':
             item_list = self.resource_list
 
             copy_props.append('revision')
 
-            script_file_refs = [self._process_file(f['file'])
+            script_file_refs = [self._process_file(f['file'], piggybacked)
                                 for f in item_def.get('scripts', [])]
 
             deps = [{'identifier': res_ref['identifier']}
                     for res_ref in item_def.get('dependencies', [])]
 
             new_item_obj = {
-                'dependencies': deps,
+                'dependencies': [*piggybacked.resource_must_depend, *deps],
                 'scripts': script_file_refs
             }
         else:
@@ -287,16 +298,57 @@
                 'payloads': payloads
             }
 
-        new_item_obj.update([(p, item_def[p]) for p in copy_props])
-
         new_item_obj['version'] = util.normalize_version(item_def['version'])
-        new_item_obj['$schema'] = f'{schemas_root}/api_{item_def["type"]}_description-1.schema.json'
+
+        if as_what == 'mapping' and item_def['type'] == "mapping_and_resource":
+            new_item_obj['version'].append(item_def['revision'])
+
+        if self.source_schema_ver >= [2]:
+            # handle 'required_mappings' field
+            required = [{'identifier': map_ref['identifier']}
+                        for map_ref in item_def.get('required_mappings', [])]
+            if required:
+                resulting_schema_version = max(resulting_schema_version, [2])
+                new_item_obj['required_mappings'] = required
+
+            # handle 'permissions' field
+            permissions = item_def.get('permissions', {})
+            processed_permissions = {}
+
+            if permissions.get('cors_bypass'):
+                processed_permissions['cors_bypass'] = True
+            if permissions.get('eval'):
+                processed_permissions['eval'] = True
+
+            if processed_permissions:
+                new_item_obj['permissions'] = processed_permissions
+                resulting_schema_version = max(resulting_schema_version, [2])
+
+            # handle '{min,max}_haketilo_version' fields
+            for minmax, default in ('min', [1]), ('max', [65536]):
+                constraint = item_def.get(f'{minmax}_haketilo_version')
+                if constraint in (None, default):
+                    continue
+
+                copy_props.append(f'{minmax}_haketilo_version')
+                resulting_schema_version = max(resulting_schema_version, [2])
+
+        new_item_obj.update((p, item_def[p]) for p in copy_props)
+
+        new_item_obj['$schema'] = ''.join([
+            schemas_root,
+            f'/api_{as_what}_description',
+            '-',
+            util.version_string(resulting_schema_version),
+            '.schema.json'
+        ])
+        new_item_obj['type'] = as_what
         new_item_obj['source_copyright'] = self.copyright_file_refs
         new_item_obj['source_name'] = self.source_name
         new_item_obj['generated_by'] = generated_by
 
         item_list.append(new_item_obj)
 
         props_in_ref = ('type', 'identifier', 'version', 'long_name')
         return dict([(prop, new_item_obj[prop]) for prop in props_in_ref])
 
```
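An editor-added aside: the splat-filter used to build copy_props above is equivalent to a plain conditional comprehension; the input dict is made up.

```python
item_def = {'identifier': 'x', 'comment': 'hi'}   # hypothetical definition

with_filter = ['identifier', 'long_name', 'description',
               *filter(lambda p: p in item_def, ('comment', 'uuid'))]
with_comprehension = ['identifier', 'long_name', 'description',
                      *(p for p in ('comment', 'uuid') if p in item_def)]
assert with_filter == with_comprehension
```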
|
src/hydrilla/builder/build.py (continued):

```diff
@@ -303,46 +355,69 @@
-    def _process_index_json(self, index_obj: dict):
+    def _process_index_json(self, index_obj: dict,
+                            major_schema_version: int) -> None:
         """
         Process 'index_obj' as contents of source package's index.json and store
         in memory this source package's zipfile as well as package's individual
         files and computed definitions of the source package and items defined
         in it.
         """
-        index_validator.validate(index_obj)
+        schema_name = f'package_source-{major_schema_version}.schema.json'
+
+        util.validator_for(schema_name).validate(index_obj)
 
-        schema = f'{schemas_root}/api_source_description-1.schema.json'
+        match = re.match(r'.*-((([1-9][0-9]*|0)\.)+)schema\.json$',
+                         index_obj['$schema'])
+        self.source_schema_ver = \
+            [int(n) for n in filter(None, match.group(1).split('.'))]
+
+        out_schema = f'{schemas_root}/api_source_description-1.schema.json'
 
         self.source_name = index_obj['source_name']
 
         generate_spdx = index_obj.get('reuse_generate_spdx_report', False)
         if generate_spdx:
             contents = generate_spdx_report(self.srcdir)
-            spdx_path = (self.srcdir / 'report.spdx').resolve()
+            spdx_path = PurePosixPath('report.spdx')
             spdx_ref = FileRef(spdx_path, contents)
 
-            spdx_ref.include_in_zipfile = False
+            spdx_ref.include_in_source_archive = False
             self.files_by_path[spdx_path] = spdx_ref
 
-        self.copyright_file_refs = \
-            [self._process_file(f['file']) for f in index_obj['copyright']]
+        piggyback_def = None
+        if self.source_schema_ver >= [1, 1] and 'piggyback_on' in index_obj:
+            piggyback_def = index_obj['piggyback_on']
 
-        if generate_spdx and not spdx_ref.include_in_distribution:
-            raise FileReferenceError(_('report_spdx_not_in_copyright_list'))
+        with piggybacked_system(piggyback_def, self.piggyback_files) \
+             as piggybacked:
+            copyright_to_process = [
+                *(file_ref['file'] for file_ref in index_obj['copyright']),
+                *piggybacked.package_license_files
+            ]
+            self.copyright_file_refs = [self._process_file(f, piggybacked)
+                                        for f in copyright_to_process]
 
-        item_refs = [self._process_item(d) for d in index_obj['definitions']]
+            if generate_spdx and not spdx_ref.include_in_distribution:
+                raise FileReferenceError(_('report_spdx_not_in_copyright_list'))
 
-        for file_ref in index_obj.get('additional_files', []):
-            self._process_file(file_ref['file'], include_in_distribution=False)
+            item_refs = []
+            for item_def in index_obj['definitions']:
+                if 'mapping' in item_def['type']:
+                    ref = self._process_item('mapping', item_def, piggybacked)
+                    item_refs.append(ref)
+                if 'resource' in item_def['type']:
+                    ref = self._process_item('resource', item_def, piggybacked)
+                    item_refs.append(ref)
 
-        root_dir_path = Path(self.source_name)
+            for file_ref in index_obj.get('additional_files', []):
+                self._process_file(file_ref['file'], piggybacked,
+                                   include_in_distribution=False)
 
-        source_archives_obj = {
-            'zip' : {
-                'sha256': self._prepare_source_package_zip(root_dir_path)
-            }
-        }
+            zipfile_sha256 = self._prepare_source_package_zip\
+                (self.source_name, piggybacked)
+
+            source_archives_obj = {'zip' : {'sha256': zipfile_sha256}}
 
         self.source_description = {
-            '$schema': schema,
+            '$schema': out_schema,
             'source_name': self.source_name,
             'source_copyright': self.copyright_file_refs,
             'upstream_url': index_obj['upstream_url'],
@@ -398,20 +473,25 @@
 
 dir_type = click.Path(exists=True, file_okay=False, resolve_path=True)
 
+@click.command(help=_('build_package_from_srcdir_to_dstdir'))
 @click.option('-s', '--srcdir', default='./', type=dir_type, show_default=True,
               help=_('source_directory_to_build_from'))
 @click.option('-i', '--index-json', default='index.json', type=click.Path(),
               help=_('path_instead_of_index_json'))
+@click.option('-p', '--piggyback-files', type=click.Path(),
+              help=_('path_instead_for_piggyback_files'))
 @click.option('-d', '--dstdir', type=dir_type, required=True,
               help=_('built_package_files_destination'))
 @click.version_option(version=_version.version, prog_name='Hydrilla builder',
                       message=_('%(prog)s_%(version)s_license'),
                       help=_('version_printing'))
-def perform(srcdir, index_json, dstdir):
-    """<this will be replaced by a localized docstring for Click to pick up>"""
-    build = Build(Path(srcdir), Path(index_json))
-    build.write_package_files(Path(dstdir))
-
-perform.__doc__ = _('build_package_from_srcdir_to_dstdir')
-
-perform = click.command()(perform)
+def perform(srcdir, index_json, piggyback_files, dstdir):
+    """
+    Execute Hydrilla builder to turn source package into a distributable one.
+
+    This command is meant to be the entry point of hydrilla-builder command
+    exported by this package.
+    """
+    build = Build(Path(srcdir), Path(index_json),
+                  piggyback_files and Path(piggyback_files))
+    build.write_package_files(Path(dstdir))
```
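An editor-added illustration of the $schema version extraction performed in _process_index_json() above:

```python
import re

url = 'https://hydrilla.koszko.org/schemas/package_source-1.0.1.schema.json'
match = re.match(r'.*-((([1-9][0-9]*|0)\.)+)schema\.json$', url)
ver = [int(n) for n in filter(None, match.group(1).split('.'))]
assert ver == [1, 0, 1]
# 'package_source-2.schema.json' would yield [2] the same way.
```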
|
```diff
--- /dev/null
+++ b/src/hydrilla/builder/common_errors.py
@@ -0,0 +1,67 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+# Error classes.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use this code
+# in a proprietary program, I am not going to enforce this in court.
+
+"""
+This module defines error types for use in other parts of Hydrilla builder.
+"""
+
+# Enable using with Python 3.7.
+from __future__ import annotations
+
+from pathlib import Path
+
+from .. import util
+
+here = Path(__file__).resolve().parent
+
+_ = util.translation(here / 'locales').gettext
+
+class DistroError(Exception):
+    """
+    Exception used to report problems when resolving an OS distribution.
+    """
+
+class FileReferenceError(Exception):
+    """
+    Exception used to report various problems concerning files referenced from
+    source package.
+    """
+
+class SubprocessError(Exception):
+    """
+    Exception used to report problems related to execution of external
+    processes, including various problems when calling apt-* and dpkg-*
+    commands.
+    """
+    def __init__(self, msg: str, cp: Optional[CP]=None) -> None:
+        """Initialize this SubprocessError"""
+        if cp and cp.stdout:
+            msg = '\n\n'.join([msg, _('STDOUT_OUTPUT_heading'), cp.stdout])
+
+        if cp and cp.stderr:
+            msg = '\n\n'.join([msg, _('STDERR_OUTPUT_heading'), cp.stderr])
+
+        super().__init__(msg)
```
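An editor-added sketch of how SubprocessError folds a CompletedProcess's output into its message; the command and outputs are invented:

```python
import subprocess

from hydrilla.builder.common_errors import SubprocessError

cp = subprocess.CompletedProcess(args=['apt-get', 'update'], returncode=100,
                                 stdout='Reading package lists...',
                                 stderr='E: Could not open lock file')
err = SubprocessError("command 'apt-get update' failed", cp)
# str(err) is the base message followed by the captured stdout and stderr
# sections, each under its own localized heading.
```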
```diff
--- /dev/null
+++ b/src/hydrilla/builder/local_apt.py
@@ -0,0 +1,83 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+# Using a local APT.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use this code
+# in a proprietary program, I am not going to enforce this in court.
+
+# Enable using with Python 3.7.
+from __future__ import annotations
+
+import zipfile
+import shutil
+import re
+import subprocess
+CP = subprocess.CompletedProcess
+from pathlib import Path, PurePosixPath
+from tempfile import TemporaryDirectory, NamedTemporaryFile
+from hashlib import sha256
+from urllib.parse import unquote
+from contextlib import contextmanager
+from typing import Optional, Iterable
+
+from .. import util
+from .piggybacking import Piggybacked
+from .common_errors import *
+
+here = Path(__file__).resolve().parent
+
+_ = util.translation(here / 'locales').gettext
+
+"""
+Default cache directory to save APT configurations and downloaded GPG keys in.
+"""
+default_apt_cache_dir = Path.home() / '.cache' / 'hydrilla' / 'builder' / 'apt'
+
+"""
+Default keyserver to use.
+"""
+default_keyserver = 'hkps://keyserver.ubuntu.com:443'
+
+"""
+Default keys to download when using a local APT.
+"""
+default_keys = [
+    # Trisquel
+    'E6C27099CA21965B734AEA31B4EFB9F38D8AEBF1',
+    '60364C9869F92450421F0C22B138CA450C05112F',
+    # Ubuntu
+    '630239CC130E1A7FD81A27B140976EAF437D05B5',
+    '790BC7277767219C42C86F933B4FE6ACC0B21F32',
+    'F6ECB3762474EDA9D21B7022871920D1991BC93C',
+    # Debian
+    '6D33866EDD8FFA41C0143AEDDCC9EFBF77E11517',
+    '80D15823B7FD1561F9F7BCDDDC30D7C23CBBABEE',
+    'AC530D520F2F3269F5E98313A48449044AAD5C5D'
+]
+
+"""sources.list file contents for known distros."""
+default_lists = {
+    'nabia': [f'{type} http://archive.trisquel.info/trisquel/ nabia{suf} main'
+              for type in ('deb', 'deb-src')
+              for suf in ('', '-updates', '-security')]
+}
```
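An editor-added expansion of the 'nabia' comprehension above — it yields six sources.list entries:

```python
entries = [f'{type} http://archive.trisquel.info/trisquel/ nabia{suf} main'
           for type in ('deb', 'deb-src')
           for suf in ('', '-updates', '-security')]

assert len(entries) == 6
assert entries[0] == 'deb http://archive.trisquel.info/trisquel/ nabia main'
assert entries[4] == \
    'deb-src http://archive.trisquel.info/trisquel/ nabia-updates main'
```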
|
|
src/hydrilla/builder/local_apt.py (continued):

```diff
+class GpgError(Exception):
+    """
+    Exception used to report various problems when calling GPG.
+    """
+
+class AptError(SubprocessError):
+    """
+    Exception used to report various problems when calling apt-* and dpkg-*
+    commands.
+    """
+
+def run(command, **kwargs):
+    """A wrapper around subprocess.run that sets some default options."""
+    return subprocess.run(command, **kwargs, env={'LANG': 'en_US'},
+                          capture_output=True, text=True)
+
+class Apt:
+    """
+    This class represents an APT instance and can be used to call apt-get
+    commands with it.
+    """
+    def __init__(self, apt_conf: str) -> None:
+        """Initialize this Apt object."""
+        self.apt_conf = apt_conf
+
+    def get(self, *args: str, **kwargs) -> CP:
+        """
+        Run apt-get with the specified arguments and raise a meaningful AptError
+        when something goes wrong.
+        """
+        command = ['apt-get', '-c', self.apt_conf, *args]
+        try:
+            cp = run(command, **kwargs)
+        except FileNotFoundError:
+            msg = _('couldnt_execute_{}_is_it_installed').format('apt-get')
+            raise AptError(msg)
+
+        if cp.returncode != 0:
+            msg = _('command_{}_failed').format(' '.join(command))
+            raise AptError(msg, cp)
+
+        return cp
+
+def cache_dir() -> Path:
+    """
+    Return the directory used to cache data (APT configurations, keyrings) to
+    speed up repeated operations.
+
+    This function first ensures the directory exists.
+    """
+    default_apt_cache_dir.mkdir(parents=True, exist_ok=True)
+    return default_apt_cache_dir
+
+class SourcesList:
+    """Representation of apt's sources.list contents."""
+    def __init__(self, list: [str]=[], codename: Optional[str]=None) -> None:
+        """Initialize this SourcesList."""
+        self.codename = None
+        self.list = [*list]
+        self.has_extra_entries = bool(self.list)
+
+        if codename is not None:
+            if codename not in default_lists:
+                raise DistroError(_('distro_{}_unknown').format(codename))
+
+            self.codename = codename
+            self.list.extend(default_lists[codename])
+
+    def identity(self) -> str:
+        """
+        Produce a string that uniquely identifies this sources.list contents.
+        """
+        if self.codename and not self.has_extra_entries:
+            return self.codename
+
+        return sha256('\n'.join(sorted(self.list)).encode()).digest().hex()
```
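An editor-added sketch of SourcesList.identity(): a pure known distro maps to its codename, while any customized list maps to a digest (the extra repository URL below is made up):

```python
from hydrilla.builder.local_apt import SourcesList

assert SourcesList(codename='nabia').identity() == 'nabia'

custom = SourcesList(['deb http://example.com/repo stable main'],
                     codename='nabia')
assert len(custom.identity()) == 64   # sha256 hexstring instead of a codename
```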
|
|
src/hydrilla/builder/local_apt.py (continued):

```diff
+def apt_conf(directory: Path) -> str:
+    """
+    Given local APT's directory, produce a configuration suitable for running
+    APT there.
+
+    'directory' must not contain any special characters including quotes and
+    spaces.
+    """
+    return f'''
+Architecture "amd64";
+Dir "{directory}";
+Dir::State "{directory}/var/lib/apt";
+Dir::State::status "{directory}/var/lib/dpkg/status";
+Dir::Etc::SourceList "{directory}/etc/apt.sources.list";
+Dir::Etc::SourceParts "";
+Dir::Cache "{directory}/var/cache/apt";
+pkgCacheGen::Essential "none";
+Dir::Etc::Trusted "{directory}/etc/trusted.gpg";
+'''
+
+def apt_keyring(keys: [str]) -> bytes:
+    """
+    Download the requested keys if necessary and export them as a keyring
+    suitable for passing to APT.
+
+    The keyring is returned as a bytes value that should be written to a file.
+    """
+    try:
+        from gnupg import GPG
+    except ModuleNotFoundError:
+        raise GpgError(_('couldnt_import_{}_is_it_installed').format('gnupg'))
+
+    gpg = GPG(keyring=str(cache_dir() / 'master_keyring.gpg'))
+    for key in keys:
+        if gpg.list_keys(keys=[key]) != []:
+            continue
+
+        if gpg.recv_keys(default_keyserver, key).imported == 0:
+            raise GpgError(_('gpg_couldnt_recv_key_{}').format(key))
+
+    return gpg.export_keys(keys, armor=False, minimal=True)
+
+def cache_apt_root(apt_root: Path, destination_zip: Path) -> None:
+    """
+    Zip an APT root directory for later use and move the zipfile to the
+    requested destination.
+    """
+    temporary_zip_path = None
+    try:
+        tmpfile = NamedTemporaryFile(suffix='.zip', prefix='tmp_',
+                                     dir=cache_dir(), delete=False)
+        temporary_zip_path = Path(tmpfile.name)
+
+        to_skip = {Path('etc') / 'apt.conf', Path('etc') / 'trusted.gpg'}
+
+        with zipfile.ZipFile(tmpfile, 'w') as zf:
+            for member in apt_root.rglob('*'):
+                relative = member.relative_to(apt_root)
+                if relative not in to_skip:
+                    # This call will also properly add empty folders to zip file
+                    zf.write(member, relative, zipfile.ZIP_DEFLATED)
+
+        shutil.move(temporary_zip_path, destination_zip)
+    finally:
+        if temporary_zip_path is not None and temporary_zip_path.exists():
+            temporary_zip_path.unlink()
+
+def setup_local_apt(directory: Path, list: SourcesList, keys: [str]) -> Apt:
+    """
+    Create files and directories necessary for running APT without root rights
+    inside 'directory'.
+
+    'directory' must not contain any special characters including quotes and
+    spaces and must be empty.
+
+    Return an Apt object that can be used to call apt-get commands.
+    """
+    apt_root = directory / 'apt_root'
+
+    conf_text = apt_conf(apt_root)
+    keyring_bytes = apt_keyring(keys)
+
+    apt_zipfile = cache_dir() / f'apt_{list.identity()}.zip'
+    if apt_zipfile.exists():
+        with zipfile.ZipFile(apt_zipfile) as zf:
+            zf.extractall(apt_root)
+
+    for to_create in (
+            apt_root / 'var' / 'lib' / 'apt' / 'partial',
+            apt_root / 'var' / 'lib' / 'apt' / 'lists',
+            apt_root / 'var' / 'cache' / 'apt' / 'archives' / 'partial',
+            apt_root / 'etc' / 'apt' / 'preferences.d',
+            apt_root / 'var' / 'lib' / 'dpkg',
+            apt_root / 'var' / 'log' / 'apt'
+    ):
+        to_create.mkdir(parents=True, exist_ok=True)
+
+    conf_path = apt_root / 'etc' / 'apt.conf'
+    trusted_path = apt_root / 'etc' / 'trusted.gpg'
+    status_path = apt_root / 'var' / 'lib' / 'dpkg' / 'status'
+    list_path = apt_root / 'etc' / 'apt.sources.list'
+
+    conf_path.write_text(conf_text)
+    trusted_path.write_bytes(keyring_bytes)
+    status_path.touch()
+    list_path.write_text('\n'.join(list.list))
+
+    apt = Apt(str(conf_path))
+    apt.get('update')
+
+    cache_apt_root(apt_root, apt_zipfile)
+
+    return apt
+
+@contextmanager
+def local_apt(list: SourcesList, keys: [str]) -> Iterable[Apt]:
+    """
+    Create a temporary directory with proper local APT configuration in it.
+    Yield an Apt object that can be used to issue apt-get commands.
+
+    This function returns a context manager that will remove the directory on
+    close.
+    """
+    with TemporaryDirectory() as td:
+        td = Path(td)
+        yield setup_local_apt(td, list, keys)
+
+def download_apt_packages(list: SourcesList, keys: [str], packages: [str],
+                          destination_dir: Path, with_deps: bool) -> [str]:
+    """
+    Set up a local APT, update it using the specified sources.list configuration
+    and use it to download the specified packages.
+
+    This function downloads .deb files of packages matching the amd64
+    architecture (which includes packages with architecture 'all') as well as
+    all their corresponding source package files and (if requested) the debs
+    and source files of all their declared dependencies.
+
+    Return value is a list of names of all downloaded files.
+    """
+    install_line_regex = re.compile(r'^Inst (?P<name>\S+) \((?P<version>\S+) ')
+
+    with local_apt(list, keys) as apt:
+        if with_deps:
+            cp = apt.get('install', '--yes', '--just-print', *packages)
+
+            lines = cp.stdout.split('\n')
+            matches = [install_line_regex.match(l) for l in lines]
+            packages = [f'{m.group("name")}={m.group("version")}'
+                        for m in matches if m]
+
+            if not packages:
+                raise AptError(_('apt_install_output_not_understood'), cp)
+
+        # Download .debs indirectly to destination_dir by first placing them
+        # in a temporary subdirectory.
+        with TemporaryDirectory(dir=destination_dir) as td:
+            td = Path(td)
+            cp = apt.get('download', *packages, cwd=td)
+
+            deb_name_regex = re.compile(
+                r'''
+                ^
+                (?P<name>[^_]+)
+                _
+                (?P<ver>[^_]+)
+                _
+                .+ # architecture (or 'all')
+                \.deb
+                $
+                ''',
+                re.VERBOSE)
+
+            names_vers = []
```

[...]
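An editor-added illustration of how install_line_regex (defined in download_apt_packages() above) picks package names and versions out of 'apt-get install --just-print' output; the sample line imitates apt's usual format but is invented:

```python
import re

install_line_regex = re.compile(r'^Inst (?P<name>\S+) \((?P<version>\S+) ')

line = 'Inst libhello (1.0-2 Trisquel:10.0/nabia [amd64])'
m = install_line_regex.match(line)
assert f'{m.group("name")}={m.group("version")}' == 'libhello=1.0-2'
```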