diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml new file mode 100644 index 00000000..3c041ff3 --- /dev/null +++ b/.github/workflows/pytest.yml @@ -0,0 +1,40 @@ +name: pytest +on: + pull_request: + branches: + - "**" +jobs: + build: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v3 + name: Check out repository + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: Set env variables + run: | + echo "CELERY_CONFIG_PATH=./tests/tests_config.yaml" >> "$GITHUB_ENV" + echo "SSH_USERNAME=root" >> "$GITHUB_ENV" + echo "SSH_PASSWORD=root" >> "$GITHUB_ENV" + - name: Prepare python env + run: | + python -m venv env + env/bin/python -m pip install -U pip + env/bin/python -m pip install -r requirements/celery.txt + - name: Run unit tests (pytest) + run: env/bin/python -m pytest -v --cov-report term-missing:skip-covered --cov-report xml:/tmp/coverage.xml --junitxml=/tmp/pytest.xml --cov=alts tests/ | tee /tmp/pytest-coverage.txt + - name: Pytest coverage comment + uses: MishaKav/pytest-coverage-comment@main + with: + pytest-coverage-path: /tmp/pytest-coverage.txt + pytest-xml-coverage-path: /tmp/coverage.xml + title: Coverage report for changed files + badge-title: Total coverage + hide-badge: false + hide-report: false + report-only-changed-files: true + hide-comment: false + remove-link-from-badge: false + junitxml-path: /tmp/pytest.xml diff --git a/alts/shared/models.py b/alts/shared/models.py index f73279a5..3e636444 100644 --- a/alts/shared/models.py +++ b/alts/shared/models.py @@ -134,13 +134,13 @@ def broker_url(self) -> str: class AzureResultsConfig(BaseResultsConfig): - azureblockblob_container_name: typing.Optional[str] + azureblockblob_container_name: str azureblockblob_base_path: str = 'celery_result_backend/' - azure_connection_string: typing.Optional[str] + azure_connection_string: str class FilesystemResultsConfig(BaseResultsConfig): - path: typing.Optional[str] + path: str class RedisResultsConfig(BaseResultsConfig, RedisBrokerConfig):
@@ -148,22 +148,22 @@ class RedisResultsConfig(BaseResultsConfig, RedisBrokerConfig): class S3ResultsConfig(BaseResultsConfig): - s3_access_key_id: typing.Optional[str] - s3_secret_access_key: typing.Optional[str] - s3_bucket: typing.Optional[str] + s3_access_key_id: str + s3_secret_access_key: str + s3_bucket: str s3_base_path: str = 'celery_result_backend/' - s3_region: typing.Optional[str] - s3_endpoint_url: typing.Optional[str] = None + s3_region: str + s3_endpoint_url: str class AzureLogsConfig(BaseLogsConfig, AzureResultsConfig): - azure_logs_container: typing.Optional[str] + azure_logs_container: str class PulpLogsConfig(BaseLogsConfig): - pulp_host: typing.Optional[str] - pulp_user: typing.Optional[str] - pulp_password: typing.Optional[str] + pulp_host: str + pulp_user: str + pulp_password: str class CeleryConfig(BaseModel): diff --git a/alts/shared/utils/asyncssh.py b/alts/shared/utils/asyncssh.py index ee0caa8e..802e8a2c 100644 --- a/alts/shared/utils/asyncssh.py +++ b/alts/shared/utils/asyncssh.py @@ -1,33 +1,12 @@ import asyncio import logging -import typing from contextlib import asynccontextmanager -from typing import Any, Dict, List, Optional +from traceback import format_exc +from typing import Any, Dict, List, Literal, Optional, Tuple import asyncssh -class AsyncSSHClientSession(asyncssh.SSHClientSession): - def data_received(self, data: str, datatype: asyncssh.DataType): - if datatype == asyncssh.EXTENDED_DATA_STDERR: - logging.error( - 'SSH command stderr:\n%s', - data, - ) - else: - logging.info( - 'SSH command stdout:\n%s', - data, - ) - - def connection_lost(self, exc: typing.Optional[Exception]): - if exc: - logging.exception( - 'SSH session error:', - ) - raise exc - - class AsyncSSHClient: def __init__( self, @@ -38,6 +17,16 @@ def __init__( known_hosts_files: Optional[List[str]] = None, disable_known_hosts_check: bool = False, env_vars: Optional[Dict[str, Any]] = None, + logger: Optional[logging.Logger] = None, + logger_name: str = 
'asyncssh-client', + logging_level: Literal[ + 'NOTSET', + 'DEBUG', + 'INFO', + 'WARNING', + 'ERROR', + 'CRITICAL', + ] = 'DEBUG', ): self.username = username self.password = password @@ -51,6 +40,24 @@ def __init__( ) if disable_known_hosts_check: self.known_hosts = None + self.logger = logger or self.setup_logger( + logger_name, logging_level) + + def setup_logger( + self, + logger_name: str, + logging_level: str, + ) -> logging.Logger: + logger = logging.getLogger(logger_name) + logger.setLevel(logging_level) + handler = logging.StreamHandler() + handler.setLevel(logging_level) + formatter = logging.Formatter( + '%(asctime)s [%(name)s:%(levelname)s] - %(message)s' + ) + handler.setFormatter(formatter) + logger.addHandler(handler) + return logger @asynccontextmanager async def get_connection(self): @@ -64,26 +71,70 @@ async def get_connection(self): ) as conn: yield conn - def sync_run_command(self, command: str): - try: - asyncio.run(self.async_run_command(command)) - except Exception as exc: - logging.exception('Cannot execute asyncssh command: %s', command) - raise exc + def get_process_results( + self, + result: asyncssh.SSHCompletedProcess, + ) -> Tuple[int, str, str]: + return result.exit_status, result.stdout, result.stderr - async def async_run_command(self, command: str): + def print_process_results( + self, + result: asyncssh.SSHCompletedProcess, + ): + self.logger.debug( + 'Exit code: %s, stdout: %s, stderr: %s', + *self.get_process_results(result), + ) + + async def async_run_command( + self, + command: str, + ) -> Tuple[int, str, str]: async with self.get_connection() as conn: - channel, session = await conn.create_session( - AsyncSSHClientSession, - command, + result = await conn.run(command) + self.print_process_results(result) + return self.get_process_results(result) + + def sync_run_command( + self, + command: str, + ) -> Tuple[int, str, str]: + try: + return asyncio.run(self.async_run_command(command)) + except Exception as exc: +
self.logger.exception( + 'Cannot execute asyncssh command: %s', command ) - await channel.wait_closed() + raise exc - async def async_run_commands(self, commands: List[str]): + async def async_run_commands( + self, + commands: List[str], + ) -> Dict[str, Tuple[int, str, str]]: + results = {} async with self.get_connection() as conn: for command in commands: - channel, session = await conn.create_session( - AsyncSSHClientSession, - command, - ) - await channel.wait_closed() + try: + result = await conn.run(command) + except Exception: + self.logger.exception( + 'Cannot execute asyncssh command: %s', + command, + ) + results[command] = (1, '', format_exc()) + continue + self.print_process_results(result) + results[command] = self.get_process_results(result) + return results + + def sync_run_commands( + self, + commands: List[str], + ) -> Dict[str, Tuple[int, str, str]]: + try: + return asyncio.run(self.async_run_commands(commands)) + except Exception as exc: + self.logger.exception( + 'Cannot execute asyncssh commands: %s', commands + ) + raise exc diff --git a/alts/worker/executors/base.py b/alts/worker/executors/base.py index 7d3c4a19..a6d7c6ee 100644 --- a/alts/worker/executors/base.py +++ b/alts/worker/executors/base.py @@ -1,7 +1,7 @@ import logging from datetime import datetime from functools import wraps -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Literal, Optional, Tuple, Union from plumbum import local @@ -34,11 +34,20 @@ def wrapped(self, *args, **kwargs): class BaseExecutor: def __init__( self, + binary_name: str, env_vars: Optional[Dict[str, Any]] = None, - binary_name: Optional[str] = None, ssh_params: Optional[Union[Dict[str, Any], AsyncSSHParams]] = None, timeout: Optional[int] = None, logger: Optional[logging.Logger] = None, + logger_name: str = 'base-executor', + logging_level: Literal[ + 'NOTSET', + 'DEBUG', + 'INFO', + 'WARNING', + 'ERROR', + 'CRITICAL', + ] = 'DEBUG', ) -> None: self.ssh_client 
= None self.env_vars = {} @@ -56,14 +65,43 @@ def __init__( self.ssh_client = AsyncSSHClient(**ssh_params.dict()) self.logger = logger if not self.logger: - self.logger = logging.getLogger('executor') + self.logger = self.setup_logger(logger_name, logging_level) + self.check_binary_existence() - @measure_stage('run_local_command') - def run_local_command(self, cmd_args: List[str]) -> Tuple[int, str, str]: - if self.binary_name not in local: + def setup_logger( + self, + logger_name: str, + logging_level: str, + ) -> logging.Logger: + logger = logging.getLogger(logger_name) + logger.setLevel(logging_level) + handler = logging.StreamHandler() + handler.setLevel(logging_level) + formatter = logging.Formatter( + '%(asctime)s [%(name)s:%(levelname)s] - %(message)s' + ) + handler.setFormatter(formatter) + logger.addHandler(handler) + return logger + + def check_binary_existence(self): + cmd_args = ['--version'] + func = self.run_local_command + if self.ssh_client: + func = self.ssh_client.sync_run_command + cmd_args = f'{self.binary_name} --version' + try: + exit_code, *_ = func(cmd_args) + except Exception as exc: + self.logger.exception('Cannot check binary existence:') + raise exc + if exit_code != 0: raise FileNotFoundError( - f'Binary {self.binary_name} is not found in PATH on the machine', + f'Binary "{self.binary_name}" is not found in PATH on the machine', ) + + @measure_stage('run_local_command') + def run_local_command(self, cmd_args: List[str]) -> Tuple[int, str, str]: with local.env(**self.env_vars): return local[self.binary_name].run( args=cmd_args, @@ -71,7 +109,44 @@ def run_local_command(self, cmd_args: List[str]) -> Tuple[int, str, str]: ) @measure_stage('run_ssh_command') - def run_ssh_command(self, cmd: str): + def run_ssh_command(self, cmd: str) -> Tuple[int, str, str]: if not self.ssh_client: raise ValueError('SSH params are missing') - return self.ssh_client.sync_run_command(cmd) + return self.ssh_client.sync_run_command(f'{self.binary_name} {cmd}') 
+ + +class BatsExecutor(BaseExecutor): + def __init__( + self, + binary_name: str = 'bats', + env_vars: Optional[Dict[str, Any]] = None, + ssh_params: Optional[Union[Dict[str, Any], AsyncSSHParams]] = None, + timeout: Optional[int] = None, + logger: Optional[logging.Logger] = None, + logger_name: str = 'bats-executor', + logging_level: Literal[ + 'NOTSET', + 'DEBUG', + 'INFO', + 'WARNING', + 'ERROR', + 'CRITICAL', + ] = 'DEBUG', + ): + super().__init__( + binary_name=binary_name, + env_vars=env_vars, + ssh_params=ssh_params, + timeout=timeout, + logger=logger, + logger_name=logger_name, + logging_level=logging_level, + ) + + @measure_stage('run_local_bats') + def run_local_command(self, cmd_args: List[str]) -> Tuple[int, str, str]: + return super().run_local_command(['--tap'] + cmd_args) + + @measure_stage('run_ssh_bats') + def run_ssh_command(self, cmd: str) -> Tuple[int, str, str]: + return super().run_ssh_command(f'{self.binary_name} --tap {cmd}') diff --git a/requirements/base.txt b/requirements/base.txt index cf444a27..700df8ba 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -7,7 +7,7 @@ ruamel.yaml==0.17.30 cryptography==41.0.2 azure-storage-blob==12.16.0 tap.py==3.1 -librabbitmq==2.0.0 +# librabbitmq==2.0.0 requests>=2.25.1 filesplit==3.0.2 pulpcore-client==3.17.3 diff --git a/requirements/celery.txt b/requirements/celery.txt index 4d8f9c84..ab32dd5e 100644 --- a/requirements/celery.txt +++ b/requirements/celery.txt @@ -1,5 +1,6 @@ -r base.txt -pytest==7.3.1 +pytest==7.4.2 +pytest-cov==4.1.0 pytest-check==2.1.4 pytest-testinfra==8.1.0 pytest-tap==3.3 diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 00000000..dc7cab99 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,27 @@ +# Unit tests +## Content +`conftest.py` - a module where setups pytest plugins and contains some base fixtures + +`fixtures/` - a directory with pytest fixtures, new module should be also added in `conftest.pytest_plugins` + +`mock_classes.py` - a 
module which contains the base class with `httpx` request method, setup logic for each test suite and HTTP status codes +## How to run tests locally +1. Create `test-almalinux-bs` database +2. Adjust variables in `vars.env` + ``` + POSTGRES_DB="test-almalinux-bs" + POSTGRES_PASSWORD="password" + DATABASE_URL="postgresql+asyncpg://postgres:password@db/test-almalinux-bs" + SYNC_DATABASE_URL="postgresql+psycopg2://postgres:password@db/test-almalinux-bs" + PULP_DATABASE_URL="postgresql+psycopg2://postgres:password@db/test-almalinux-bs" + ``` +3. Up docker-compose services + ```bash + docker-compose up -d --no-deps web_server db + ``` +4. Run `pytest` within `web_server` container + ```bash + docker-compose run --no-deps --rm web_server bash -c 'source env/bin/activate && pytest -v --ignore alws/' + ``` + - we ignore `alws/` directory because it contains files whose names start with `test*.py` + diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..f8675dad --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,3 @@ +pytest_plugins = [ + 'tests.fixtures.executors', +] diff --git a/tests/executors/__init__.py b/tests/executors/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/executors/test_executors.py b/tests/executors/test_executors.py new file mode 100644 index 00000000..e74dcbdb --- /dev/null +++ b/tests/executors/test_executors.py @@ -0,0 +1,104 @@ +from contextlib import nullcontext as does_not_raise +from typing import Any, Dict, List + +import pytest +from asyncssh.misc import HostKeyNotVerifiable, PermissionDenied +from plumbum.commands.processes import CommandNotFound + +from alts.worker.executors.base import BaseExecutor + + +class TestBaseExecutor: + @pytest.mark.parametrize( + 'binary_name, exception', + [ + pytest.param( + 'bash', + does_not_raise(), + id='bash', + ), + pytest.param( + 'foo_bar', + pytest.raises(CommandNotFound), + id='nonexistent_binary', + ), + ], + ) + def
test_base_executor_init(self, binary_name: str, exception): + with exception: + executor = BaseExecutor(binary_name=binary_name) + assert executor.check_binary_existence() is None + + @pytest.mark.parametrize( + 'binary_name, cmd_args, expected_exit_code', + [ + pytest.param( + 'bash', + ['--version'], + 0, + id='bash', + ), + pytest.param( + 'man', + ['--version'], + 0, + id='man', + ), + ], + ) + def test_base_executor_run_local_command( + self, + binary_name: str, + cmd_args: List[str], + expected_exit_code: int, + ): + executor = BaseExecutor(binary_name=binary_name) + exit_code, *_ = executor.run_local_command(cmd_args) + assert expected_exit_code == exit_code + + @pytest.mark.parametrize( + 'binary_name, additional_ssh_params, exception', + [ + pytest.param( + 'bash', + { + 'disable_known_hosts_check': True, + }, + does_not_raise(), + id='bash', + ), + pytest.param( + 'bash', + {}, + pytest.raises(HostKeyNotVerifiable), + id='untrusted_host_key', + ), + pytest.param( + 'bash', + { + 'password': 'foo_bar', + 'disable_known_hosts_check': True, + }, + pytest.raises(PermissionDenied), + id='permission_denied', + ), + ], + ) + def test_run_ssh_command( + self, + local_ssh_credentials: Dict[str, str], + binary_name: str, + additional_ssh_params: Dict[str, Any], + exception, + ): + ssh_params = { + **local_ssh_credentials, + **additional_ssh_params, + } + with exception: + executor = BaseExecutor( + binary_name=binary_name, + ssh_params=ssh_params, + ) + exit_code, *_ = executor.run_ssh_command('--version') + assert exit_code == 0 diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/executors.py b/tests/fixtures/executors.py new file mode 100644 index 00000000..d490acff --- /dev/null +++ b/tests/fixtures/executors.py @@ -0,0 +1,12 @@ +import os + +import pytest + + +@pytest.fixture(scope='session') +def local_ssh_credentials(): + return { + 'host': os.getenv('SSH_HOST', 
'localhost'), + 'username': os.getenv('SSH_USERNAME', 'root'), + 'password': os.getenv('SSH_PASSWORD', 'root'), + } diff --git a/tests/runners/test_docker_runner.py b/tests/runners/test_docker_runner.py index 2fc39a73..dd089110 100644 --- a/tests/runners/test_docker_runner.py +++ b/tests/runners/test_docker_runner.py @@ -1,82 +1,67 @@ -from unittest import TestCase +from typing import Tuple -from ddt import ddt -from ddt import data, unpack -# from pyfakefs.fake_filesystem_unittest import TestCase +import pytest from alts.worker.runners import DockerRunner -fedora_runner_params = ('test_id_1', 'fedora', '33') -centos_8_runner_params = ('test_id_2', 'centos', 8) -centos_7_runner_params = ('test_id_3', 'centos', 7) -ubuntu_runner_params = ('test_id_4', 'ubuntu', '20.04') -debian_runner_params = ('test_id_5', 'debian', '11.0') -almalinux_runner_params = ('test_id_6', 'almalinux', '8.3') +# from pyfakefs.fake_filesystem_unittest import TestCase -basics_data = ( - ( - centos_8_runner_params, - { - 'ansible_connection_type': 'docker', - 'repositories': [], - 'pkg_manager': 'dnf' - } - ), - ( - centos_7_runner_params, - { - 'ansible_connection_type': 'docker', - 'repositories': [], - 'pkg_manager': 'yum' - } - ), - ( - ubuntu_runner_params, - { - 'ansible_connection_type': 'docker', - 'repositories': [], - 'pkg_manager': 'apt-get' - } - ), - ( - fedora_runner_params, - { - 'ansible_connection_type': 'docker', - 'repositories': [], - 'pkg_manager': 'dnf' - } - ), - ( - debian_runner_params, - { - 'ansible_connection_type': 'docker', - 'repositories': [], - 'pkg_manager': 'apt-get' - } - ), - ( - almalinux_runner_params, - { + +class TestDockerRunner: + @pytest.mark.parametrize( + 'inputs, pkg_manager', + [ + pytest.param( + ('test_id_1', 'fedora', '33'), + 'dnf', + id='fedora_33', + ), + pytest.param( + ('test_id_2', 'centos', '7'), + 'yum', + id='centos_7', + ), + pytest.param( + ('test_id_3', 'centos', '8'), + 'dnf', + id='centos_8', + ), + pytest.param( + ('test_id_4',
'ubuntu', '20.04'), + 'apt-get', + id='ubuntu_20.04', + ), + pytest.param( + ('test_id_5', 'debian', '11.0'), + 'apt-get', + id='debian_11.0', + ), + pytest.param( + ('test_id_6', 'almalinux', '8.3'), + 'dnf', + id='almalinux_8.3', + ), + ], + ) + def test_docker_runner_init( + self, + inputs: Tuple[str, str, str], + pkg_manager: str, + ): + expected = { 'ansible_connection_type': 'docker', 'repositories': [], - 'pkg_manager': 'dnf' + 'pkg_manager': pkg_manager, } - ), -) - - -@ddt -class TestDockerRunner(TestCase): - - @data(*basics_data) - @unpack - def test_basics(self, inputs: tuple, expected: dict): runner = DockerRunner(*inputs) - self.assertIsInstance(runner.dist_name, str) - self.assertIsInstance(runner.dist_version, str) - for attribute in ('ansible_connection_type', 'repositories', - 'pkg_manager'): - self.assertEqual(getattr(runner, attribute), expected[attribute]) + assert isinstance(runner.dist_name, str) + assert isinstance(runner.dist_version, str) + for attribute in ( + 'ansible_connection_type', + 'repositories', + 'pkg_manager', + ): + assert getattr(runner, attribute) == expected[attribute] # def setUp(self) -> None: # self.patcher = patch('os.stat', MagicMock()) diff --git a/tests/tests_config.yaml b/tests/tests_config.yaml new file mode 100644 index 00000000..317140a3 --- /dev/null +++ b/tests/tests_config.yaml @@ -0,0 +1,35 @@ +--- +use_ssl: false +ssl_config: + security_cert_store: "/etc/ssl/certs/*.crt" + security_key: "" + security_certificate: "" + broker_ca_certificates: "" + security_digest: "sha256" +broker_config: + use_ssl: false + rabbitmq_host: "rabbitmq" + rabbitmq_port: 5672 + rabbitmq_ssl_port: 5671 + rabbitmq_user: "user" + rabbitmq_password: "password" + rabbitmq_vhost: "test_system" +results_backend_config: + path: "file:///tmp/" +task_default_queue: "default" +task_acks_late: true +task_track_started: true +artifacts_root_directory: "test_system_artifacts" +worker_prefetch_multiplier: 1 +jwt_secret: "secret" 
+hashing_algorithm: "HS256" +opennebula_rpc_endpoint: +opennebula_username: +opennebula_password: +opennebula_vm_group: +bs_host: "http://web_server:8000" +bs_token: "token" +logs_uploader_config: + pulp_host: "http://pulp" + pulp_user: "user" + pulp_password: "password"