Commit 3c443e22 authored by David Runge's avatar David Runge
Browse files

Merge branch 'issues/9' into 'master'

Add integration tests for db2json and json2db

Closes #9

See merge request !5
parents bb999eab 9ba649d1
Pipeline #5874 passed with stage
in 5 minutes and 50 seconds
...@@ -6,19 +6,25 @@ default: ...@@ -6,19 +6,25 @@ default:
image: "archlinux:latest" image: "archlinux:latest"
stages: stages:
- lint - test
.unit_test: .test:
stage: lint stage: test
before_script: before_script:
- pacman --noconfirm -Fy
- pacman --noconfirm -Syu --needed base-devel python-poetry python-tox - pacman --noconfirm -Syu --needed base-devel python-poetry python-tox
linter: linter:
extends: .unit_test extends: .test
script: script:
- tox -e linter - tox -e linter
coverage: coverage:
extends: .unit_test extends: .test
script: script:
- tox -e coverage - tox -e coverage
integration:
extends: .test
script:
- tox -e integration
...@@ -324,6 +324,18 @@ category = "dev" ...@@ -324,6 +324,18 @@ category = "dev"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "subprocess-tee"
version = "0.2.0"
description = "subprocess-tee"
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
rich = ["enrich (>=9.5.1)"]
test = ["enrich (>=1.2.5)", "mock (>=3.0.5)", "pytest-cov (>=2.7.1)", "pytest-plus", "pytest-xdist (>=1.29.0)", "pytest (>=6.1.0)"]
[[package]] [[package]]
name = "toml" name = "toml"
version = "0.10.2" version = "0.10.2"
...@@ -391,7 +403,7 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", ...@@ -391,7 +403,7 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)",
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.9" python-versions = "^3.9"
content-hash = "31bb6f8018746cb2480f660aa14f53948ccc3a3505642533a576f5c05ff09aaa" content-hash = "4b85406328a00d6d6cc7d9652a65481a106ee7e2048b9cae5f900b199003c638"
[metadata.files] [metadata.files]
appdirs = [ appdirs = [
...@@ -697,6 +709,10 @@ six = [ ...@@ -697,6 +709,10 @@ six = [
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
] ]
subprocess-tee = [
{file = "subprocess-tee-0.2.0.tar.gz", hash = "sha256:8374dfdb1bd6e64278afa67be41f13772b677ae768b523550060fe41d8274faa"},
{file = "subprocess_tee-0.2.0-py3-none-any.whl", hash = "sha256:f748b5a998f70de1fe2ff5b4d3e28f8671149cb33a3ff90537db26b90122881d"},
]
toml = [ toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
......
...@@ -15,6 +15,7 @@ pyalpm = "^0.9.2" ...@@ -15,6 +15,7 @@ pyalpm = "^0.9.2"
pydantic = "^1.8.1" pydantic = "^1.8.1"
orjson = "^3.5.1" orjson = "^3.5.1"
Jinja2 = "^2.11.3" Jinja2 = "^2.11.3"
subprocess-tee = "^0.2.0"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
tox = "^3.23.0" tox = "^3.23.0"
...@@ -30,6 +31,9 @@ mock = "^4.0.3" ...@@ -30,6 +31,9 @@ mock = "^4.0.3"
db2json = "repo_management.cli:db2json" db2json = "repo_management.cli:db2json"
json2db = "repo_management.cli:json2db" json2db = "repo_management.cli:json2db"
[tool.pytest.ini_options]
markers = ["integration"]
[tool.black] [tool.black]
line-length = 120 line-length = 120
exclude = ''' exclude = '''
......
from repo_management import argparse, defaults, operations from argparse import ArgumentTypeError
from sys import exit
from repo_management import argparse, defaults, errors, operations
def db2json() -> None: def db2json() -> None:
...@@ -8,11 +11,15 @@ def db2json() -> None: ...@@ -8,11 +11,15 @@ def db2json() -> None:
repository database file. repository database file.
""" """
args = argparse.ArgParseFactory.db2json().parse_args() try:
operations.dump_db_to_json_files( args = argparse.ArgParseFactory.db2json().parse_args()
input_path=args.db_file, operations.dump_db_to_json_files(
output_path=args.output_dir, input_path=args.db_file,
) output_path=args.output_dir,
)
except (errors.RepoManagementError, ArgumentTypeError) as e:
print(e)
exit(1)
def json2db() -> None: def json2db() -> None:
...@@ -22,9 +29,13 @@ def json2db() -> None: ...@@ -22,9 +29,13 @@ def json2db() -> None:
in a directory. in a directory.
""" """
args = argparse.ArgParseFactory.json2db().parse_args() try:
operations.create_db_from_json_files( args = argparse.ArgParseFactory.json2db().parse_args()
input_path=args.input_dir, operations.create_db_from_json_files(
output_path=args.db_file, input_path=args.input_dir,
db_type=defaults.RepoDbType.FILES if args.files else defaults.RepoDbType.DEFAULT, output_path=args.db_file,
) db_type=defaults.RepoDbType.FILES if args.files else defaults.RepoDbType.DEFAULT,
)
except (errors.RepoManagementError, ArgumentTypeError) as e:
print(e)
exit(1)
from pathlib import Path
from subprocess import PIPE, STDOUT, CalledProcessError
from typing import Dict, List, Optional, Union
from subprocess_tee import CompletedProcess, run
def _print_env(env: Optional[Dict[str, str]]) -> None:
"""Print the environment variables from a dict
Parameters
----------
env: Optional[Dict[str, str]]
An optional dict with environment variables and their values
"""
if env:
for (key, value) in sorted(env.items()):
print(f"{key}: {value}")
def run_command(
    cmd: Union[str, List[str]],
    env: Optional[Dict[str, str]] = None,
    debug: bool = False,
    echo: bool = False,
    quiet: bool = False,
    check: bool = False,
    cwd: Union[Optional[str], Optional[Path]] = None,
) -> CompletedProcess:
    """Run a command via subprocess_tee.run() and return the completed process.

    Parameters
    ----------
    cmd: Union[str, List[str]]
        A string or list of strings that is passed to subprocess_tee.run()
    env: Optional[Dict[str, str]]
        An optional dict of environment variables and their values (defaults to None)
    debug: bool
        If True, print the environment variables and echo the command and its output
        (defaults to False)
    echo: bool
        If True, print the command before running it (defaults to False)
    quiet: bool
        If True, suppress printing the output of the command while it runs
        (defaults to False)
    check: bool
        If True, raise CalledProcessError when the command exits non-zero
        (defaults to False)
    cwd: Union[Optional[str], Optional[Path]]
        The directory to run the command in (defaults to None, which means the
        current working directory)

    Raises
    ------
    CalledProcessError
        If check is True and the command's return code is not 0

    Returns
    -------
    CompletedProcess
        The result of the command
    """
    if debug:
        _print_env(env)

    # stderr is folded into stdout so the tee'd output forms a single stream
    result = run(cmd, env=env, stdout=PIPE, stderr=STDOUT, echo=echo or debug, quiet=quiet, cwd=cwd)

    if check and result.returncode != 0:
        raise CalledProcessError(
            returncode=result.returncode,
            cmd=result.args,
            output=result.stdout,
            stderr=result.stderr,
        )
    return result
...@@ -70,6 +70,11 @@ def _desc_data_line_to_dicts( ...@@ -70,6 +70,11 @@ def _desc_data_line_to_dicts(
A dict for instances of type string A dict for instances of type string
int_types: Dict[str, int] int_types: Dict[str, int]
A dict for instances of type int A dict for instances of type int
Raises
------
ValueError
If a string is provided for a field of type int, that can not be cast to type int
""" """
if current_type == defaults.FieldType.STRING_LIST: if current_type == defaults.FieldType.STRING_LIST:
...@@ -93,10 +98,9 @@ def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc: ...@@ -93,10 +98,9 @@ def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc:
Raises Raises
------ ------
ValueError errors.RepoManagementValidationError
If a string is provided for a field of type int, that can not be cast to type int If a pydantic.error_wrappers.ValidationError is raised (e.g. due to a missing attribute) or if a ValueError is
pydantic.error_wrappers.ValidationError raised when converting data (e.g. when calling _desc_data_line_to_dicts())
If a required field is missing
Returns Returns
------- -------
...@@ -121,21 +125,26 @@ def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc: ...@@ -121,21 +125,26 @@ def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc:
continue continue
if current_header: if current_header:
_desc_data_line_to_dicts( try:
current_header=current_header, _desc_data_line_to_dicts(
current_type=current_type, current_header=current_header,
line=line, current_type=current_type,
string_list_types=string_list_types, line=line,
string_types=string_types, string_list_types=string_list_types,
int_types=int_types, string_types=string_types,
) int_types=int_types,
)
except ValueError as e:
raise errors.RepoManagementValidationError(
f"A validation error occured while creating the file:\n\n{data.getvalue()}\n{e}"
)
merged_dict: Dict[str, Union[int, str, List[str]]] = {**int_types, **string_types, **string_list_types} merged_dict: Dict[str, Union[int, str, List[str]]] = {**int_types, **string_types, **string_list_types}
try: try:
return models.PackageDesc(**merged_dict) return models.PackageDesc(**merged_dict)
except ValidationError as e: except ValidationError as e:
raise errors.RepoManagementValidationError( raise errors.RepoManagementValidationError(
f"An error occured while validating the file: {data.getvalue()}\n{e}" f"A validation error occured while creating the file:\n\n{data.getvalue()}\n{e}"
) )
......
from argparse import Namespace import shutil
import tempfile
from argparse import ArgumentTypeError, Namespace
from pathlib import Path
from typing import Dict, Iterator
from mock import Mock, patch from mock import Mock, patch
from pytest import mark from pytest import fixture, mark
from repo_management import cli, defaults from repo_management import cli, commands, defaults, errors
@fixture(scope="function")
def empty_dir() -> Iterator[Path]:
    """Yield the path to a temporary, empty directory and remove it on teardown."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        yield Path(tmp_dir)
@fixture(scope="function")
def empty_file() -> Iterator[Path]:
    """Yield the path to an empty temporary file and remove it on teardown.

    Yields
    ------
    Path
        The path of an empty, closed temporary file
    """
    # NamedTemporaryFile (unlike a bare tempfile.mkstemp()) closes the OS-level
    # file handle when the with-block exits, so no file descriptor is leaked.
    with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
        file_path = Path(tmp_file.name)
    yield file_path
    # remove the file after the test, mirroring the cleanup of empty_dir
    file_path.unlink(missing_ok=True)
@mark.parametrize(
"fail_argparse, fail_dump",
[
(False, False),
(True, False),
(False, True),
],
)
@patch("repo_management.operations.dump_db_to_json_files") @patch("repo_management.operations.dump_db_to_json_files")
@patch("repo_management.argparse.ArgParseFactory") @patch("repo_management.argparse.ArgParseFactory")
def test_db2json(argparsefactory_mock: Mock, dump_db_to_json_files_mock: Mock) -> None: @patch("repo_management.cli.exit")
def test_db2json(
exit_mock: Mock,
argparsefactory_mock: Mock,
dump_db_to_json_files_mock: Mock,
fail_argparse: bool,
fail_dump: bool,
) -> None:
namespace = Namespace(db_file="db_file", output_dir="output_dir") namespace = Namespace(db_file="db_file", output_dir="output_dir")
argparsefactory_mock.db2json.return_value = Mock(parse_args=Mock(return_value=namespace)) if fail_argparse:
argparsefactory_mock.db2json.side_effect = ArgumentTypeError
else:
argparsefactory_mock.db2json.return_value = Mock(parse_args=Mock(return_value=namespace))
if fail_dump:
dump_db_to_json_files_mock.side_effect = errors.RepoManagementError
cli.db2json() cli.db2json()
dump_db_to_json_files_mock.assert_called_once_with(input_path=namespace.db_file, output_path=namespace.output_dir) if fail_argparse or fail_dump:
exit_mock.assert_called_once_with(1)
else:
dump_db_to_json_files_mock.assert_called_once_with(
input_path=namespace.db_file,
output_path=namespace.output_dir,
)
@mark.parametrize("files, db_type", [(True, defaults.RepoDbType.FILES), (False, defaults.RepoDbType.DEFAULT)]) @mark.parametrize(
"files, db_type, fail_argparse, fail_create",
[
(True, defaults.RepoDbType.FILES, False, False),
(False, defaults.RepoDbType.DEFAULT, False, False),
(True, defaults.RepoDbType.FILES, True, False),
(False, defaults.RepoDbType.DEFAULT, True, False),
(True, defaults.RepoDbType.FILES, False, True),
(False, defaults.RepoDbType.DEFAULT, False, True),
],
)
@patch("repo_management.operations.create_db_from_json_files") @patch("repo_management.operations.create_db_from_json_files")
@patch("repo_management.argparse.ArgParseFactory") @patch("repo_management.argparse.ArgParseFactory")
@patch("repo_management.cli.exit")
def test_json2db( def test_json2db(
argparsefactory_mock: Mock, create_db_from_json_files: Mock, files: bool, db_type: defaults.RepoDbType exit_mock: Mock,
argparsefactory_mock: Mock,
create_db_from_json_files_mock: Mock,
files: bool,
db_type: defaults.RepoDbType,
fail_argparse: bool,
fail_create: bool,
) -> None: ) -> None:
namespace = Namespace(db_file="db_file", input_dir="input_dir", files=files) namespace = Namespace(db_file="db_file", input_dir="input_dir", files=files)
argparsefactory_mock.json2db.return_value = Mock(parse_args=Mock(return_value=namespace)) if fail_argparse:
argparsefactory_mock.json2db.side_effect = ArgumentTypeError
else:
argparsefactory_mock.json2db.return_value = Mock(parse_args=Mock(return_value=namespace))
if fail_create:
create_db_from_json_files_mock.side_effect = errors.RepoManagementError
cli.json2db() cli.json2db()
create_db_from_json_files.assert_called_once_with( if fail_argparse:
input_path=namespace.input_dir, exit_mock.assert_called_once_with(1)
output_path=namespace.db_file, if fail_create:
db_type=db_type, create_db_from_json_files_mock.assert_called_once_with(
input_path=namespace.input_dir,
output_path=namespace.db_file,
db_type=db_type,
)
exit_mock.assert_called_once_with(1)
if not fail_argparse and not fail_create:
create_db_from_json_files_mock.assert_called_once_with(
input_path=namespace.input_dir,
output_path=namespace.db_file,
db_type=db_type,
)
@mark.integration
@mark.parametrize(
    "db",
    [
        ("core"),
        ("extra"),
        ("community"),
    ],
)
def test_transform_databases(db: str, empty_dir: Path, empty_file: Path) -> None:
    """Round-trip one of the host's sync databases through the CLI tools:
    dump the .files database to JSON files in empty_dir, then rebuild a
    files database from that JSON into empty_file.
    """
    transform_steps = (
        ["db2json", f"/var/lib/pacman/sync/{db}.files", empty_dir.as_posix()],
        ["json2db", "-f", empty_dir.as_posix(), empty_file.as_posix()],
    )
    for step in transform_steps:
        commands.run_command(cmd=step, debug=True, check=True)
@mark.integration
# Integration test: convert the host's core/extra/community sync databases to
# JSON and back, then verify pacman can consume the recreated databases.
# NOTE(review): requires pacman and pre-synced /var/lib/pacman/sync/<db>.files
# on the host -- presumably only run in the tox "integration" environment; confirm.
def test_transform_databases_and_use_with_pacman(empty_dir: Path) -> None:
dbs = ["core", "extra", "community"]
json_paths: Dict[str, Path] = {}
# scratch layout below empty_dir: a pacman dir with separate db path,
# sync database directory and package cache
pacman_path = Path(empty_dir / Path("pacman"))
pacman_path.mkdir(parents=True)
db_path = Path(empty_dir / Path("pacman/db_path"))
db_path.mkdir(parents=True)
sync_path = Path(empty_dir / Path("pacman/db_path/sync"))
sync_path.mkdir(parents=True)
cache_path = Path(empty_dir / Path("pacman/cache_path"))
cache_path.mkdir(parents=True)
# one JSON output directory per database
for db in dbs:
json_path = Path(empty_dir / Path(db))
json_path.mkdir()
json_paths[db] = json_path
for (name, json_path) in json_paths.items():
# dump the host's .files database to per-package JSON files
commands.run_command(
cmd=["db2json", f"/var/lib/pacman/sync/{name}.files", json_path.as_posix()],
debug=True,
check=True,
)
# rebuild the default (.db) database from the JSON files
commands.run_command(
cmd=["json2db", json_path.as_posix(), f"{sync_path.as_posix()}/{name}.db"],
debug=True,
check=True,
)
# rebuild the files (.files) database from the same JSON files
commands.run_command(
cmd=["json2db", "-f", json_path.as_posix(), f"{sync_path.as_posix()}/{name}.files"],
debug=True,
check=True,
)
# pacman -Ss exercises the recreated .db databases
commands.run_command(
cmd=[
"pacman",
"--cache",
cache_path.as_posix(),
"--logfile",
f"{pacman_path.as_posix()}/pacman.log",
"--dbpath",
db_path.as_posix(),
"-Ss",
"linux",
],
debug=True,
check=True,
)
# pacman -Fl exercises the recreated .files databases
commands.run_command(
cmd=[
"pacman",
"--cache",
cache_path.as_posix(),
"--logfile",
f"{pacman_path.as_posix()}/pacman.log",
"--dbpath",
db_path.as_posix(),
"-Fl",
"linux",
],
debug=True,
check=True,
)
) )
from contextlib import nullcontext as does_not_raise
from pathlib import Path
from subprocess import CalledProcessError
from typing import ContextManager, Dict, List, Optional, Union
from pytest import mark, raises
from repo_management import commands
@mark.parametrize("env", [(None), ({"FOO": "BAR"})])
# Smoke test: _print_env must accept both None and a populated dict without raising.
def test__print_env(env: Optional[Dict[str, str]]) -> None:
commands._print_env(env)
@mark.parametrize(
"cmd, env, debug, echo, quiet, check, cwd, expectation",
[
(["ls", "-lah"], {"FOO": "BAR"}, False, False, False, False, None, does_not_raise()),
(["ls", "-lah"], {"FOO": "BAR"}, True, False, False, False, None, does_not_raise()),
# failing command with check=True must raise CalledProcessError
# NOTE(review): "cd" is a shell builtin -- presumably subprocess_tee runs the
# command through a shell so this exits non-zero rather than failing to spawn; confirm
(["cd", "-f"], {"FOO": "BAR"}, True, False, False, True, None, raises(CalledProcessError)),
],
)
# run_command must return normally on success and, with check=True,
# raise CalledProcessError when the command exits non-zero.
def test_run_command(
cmd: Union[str, List[str]],
env: Optional[Dict[str, str]],
debug: bool,
echo: bool,
quiet: bool,
check: bool,
cwd: Union[Optional[str], Optional[Path]],
expectation: ContextManager[str],
) -> None:
with expectation:
commands.run_command(cmd=cmd, env=env, debug=debug, echo=echo, quiet=quiet, check=check, cwd=cwd)
...@@ -81,7 +81,7 @@ def test__files_data_to_dict( ...@@ -81,7 +81,7 @@ def test__files_data_to_dict(
"%PGPSIG%\nfoo\n%PROVIDES%\nfoo\nbar\n%REPLACES%\nfoo\nbar\n" "%PGPSIG%\nfoo\n%PROVIDES%\nfoo\nbar\n%REPLACES%\nfoo\nbar\n"
"%SHA256SUM%\nfoo\n%URL%\nfoo\n%VERSION%\nfoo\n" "%SHA256SUM%\nfoo\n%URL%\nfoo\n%VERSION%\nfoo\n"
), ),
raises(ValueError), raises(errors.RepoManagementValidationError),
), ),
("%FOO%\nbar\n", raises(errors.RepoManagementValidationError)), ("%FOO%\nbar\n", raises(errors.RepoManagementValidationError)),
( (
......
...@@ -25,3 +25,9 @@ commands = ...@@ -25,3 +25,9 @@ commands =
poetry run black --check . poetry run black --check .
poetry run flake8 poetry run flake8
poetry run mypy -p repo_management -p tests poetry run mypy -p repo_management -p tests
[testenv:integration]
whitelist_externals = poetry