diff --git a/.github/workflows/auto_cherry_pick.yml b/.github/workflows/auto_cherry_pick.yml index 0ce0b8e9956..301d9181b9d 100644 --- a/.github/workflows/auto_cherry_pick.yml +++ b/.github/workflows/auto_cherry_pick.yml @@ -3,7 +3,7 @@ name: auto_cherry_pick_commits on: pull_request_target: - types: [closed, labeled] + types: [closed] # Github & Parent PR Env vars env: @@ -23,11 +23,12 @@ jobs: prt_comment: ${{steps.fc.outputs.comment-body}} steps: - name: Find Comment - uses: peter-evans/find-comment@v2 + uses: peter-evans/find-comment@v3 id: fc with: issue-number: ${{ env.number }} body-includes: "trigger: test-robottelo" + direction: last # Auto CherryPicking and Failure Recording auto-cherry-pick: @@ -40,6 +41,12 @@ jobs: label: ${{ github.event.pull_request.labels.*.name }} steps: + # Needed to avoid out-of-memory error + - name: Set Swap Space + uses: pierotofy/set-swap-space@master + with: + swap-size-gb: 10 + ## Robottelo Repo Checkout - uses: actions/checkout@v4 if: ${{ startsWith(matrix.label, '6.') && matrix.label != github.base_ref }} @@ -69,12 +76,13 @@ jobs: - name: Add Parent PR's PRT comment to Auto_Cherry_Picked PR's id: add-parent-prt-comment - if: ${{ always() && steps.cherrypick.outcome == 'success' }} - uses: mshick/add-pr-comment@v2 + if: ${{ always() && needs.find-the-parent-prt-comment.outputs.prt_comment != '' && steps.cherrypick.outcome == 'success' }} + uses: thollander/actions-comment-pull-request@v2 with: - issue: ${{ steps.cherrypick.outputs.number }} - message: ${{ needs.find-the-parent-prt-comment.outputs.prt_comment }} - repo-token: ${{ secrets.CHERRYPICK_PAT }} + message: | + ${{ needs.find-the-parent-prt-comment.outputs.prt_comment }} + pr_number: ${{ steps.cherrypick.outputs.number }} + GITHUB_TOKEN: ${{ secrets.CHERRYPICK_PAT }} - name: is autoMerging enabled for Auto CherryPicked PRs ? 
if: ${{ always() && steps.cherrypick.outcome == 'success' && contains(github.event.pull_request.labels.*.name, 'AutoMerge_Cherry_Picked') }} @@ -89,10 +97,25 @@ jobs: labels: ["AutoMerge_Cherry_Picked"] }) - ## Failure Logging to issues and GChat Group + - name: Check if cherrypick PR is created + id: search_pr + if: always() + run: | + PR_TITLE="[${{ matrix.label }}] ${{ env.title }}" + API_URL="https://api.github.com/repos/${{ github.repository }}/pulls?state=open" + PR_SEARCH_RESULT=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" "$API_URL" | jq --arg title "$PR_TITLE" '.[] | select(.title == $title)') + if [ -n "$PR_SEARCH_RESULT" ]; then + echo "pr_found=true" >> $GITHUB_OUTPUT + echo "PR found with title $PR_TITLE" + else + echo "pr_found=false" >> $GITHUB_OUTPUT + echo "PR not found with title $PR_TITLE" + fi + + ## Failure Logging to issues - name: Create Github issue on cherrypick failure id: create-issue - if: ${{ always() && steps.cherrypick.outcome != 'success' && startsWith(matrix.label, '6.') && matrix.label != github.base_ref }} + if: ${{ always() && steps.search_pr.outputs.pr_found == 'false' && steps.cherrypick.outcome != 'success' && startsWith(matrix.label, '6.') && matrix.label != github.base_ref }} uses: dacbd/create-issue-action@main with: token: ${{ secrets.CHERRYPICK_PAT }} diff --git a/.github/workflows/prt_labels.yml b/.github/workflows/prt_labels.yml index 311516fa682..072a46ba196 100644 --- a/.github/workflows/prt_labels.yml +++ b/.github/workflows/prt_labels.yml @@ -1,14 +1,14 @@ name: Remove the PRT label, for the new commit on: - pull_request: + pull_request_target: types: ["synchronize"] jobs: prt_labels_remover: name: remove the PRT label when amendments or new commits added to PR runs-on: ubuntu-latest - if: "(contains(github.event.pull_request.labels.*.name, 'PRT-Passed') || contains(github.event.pull_request.labels.*.name, 'PRT-Failed'))" + if: "(contains(github.event.pull_request.labels.*.name, 'PRT-Passed'))" steps: - name: Avoid the race condition as PRT result will be cleaned run: | @@ -23,7 +23,7 @@ jobs: wait-interval: 2 count: 5 - - name: remove the PRT Passed/Failed label, for new commit + - name: remove the PRT Passed label, for new commit if: always() && ${{steps.prt.outputs.result}} == 'not_found' uses: actions/github-script@v7 with: @@ -35,7 +35,7 @@ jobs: repo: context.repo.repo, issue_number: prNumber, }); - const labelsToRemove = ['PRT-Failed', 'PRT-Passed']; + const labelsToRemove = ['PRT-Passed']; const labelsToRemoveFiltered = labelsToRemove.filter(label => issue.data.labels.some(({ name }) => name === label)); if (labelsToRemoveFiltered.length > 0) { await Promise.all(labelsToRemoveFiltered.map(async label => { diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 47963adca8a..382a20b0e48 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -8,6 +8,7 @@ on: env: PYCURL_SSL_LIBRARY: openssl ROBOTTELO_BUGZILLA__API_KEY: ${{ secrets.BUGZILLA_KEY }} + ROBOTTELO_JIRA__API_KEY: ${{ secrets.JIRA_KEY }} jobs: codechecks: @@ -15,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.10', '3.11'] + python-version: ['3.10', '3.11', '3.12'] steps: - name: Checkout Robottelo uses: actions/checkout@v4 @@ -41,9 +42,6 @@ jobs: cp broker_settings.yaml.example broker_settings.yaml cp .env.example .env - - name: Pre Commit Checks - uses: pre-commit/action@v3.0.0 - - name: Collect Tests run: | # To skip vault login in pull request
checks diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml index e767f188ab5..92fa9f329a9 100644 --- a/.github/workflows/weekly.yml +++ b/.github/workflows/weekly.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.9] + python-version: [3.12] steps: - name: Checkout Robottelo uses: actions/checkout@v4 @@ -55,6 +55,7 @@ jobs: id: cscheck env: ROBOTTELO_BUGZILLA__API_KEY: ${{ secrets.BUGZILLA_KEY }} + ROBOTTELO_JIRA__API_KEY: ${{ secrets.JIRA_KEY }} - name: Customer scenario status run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c86df504763..dd6543a0af9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,29 +1,33 @@ # configuration for pre-commit git hooks +ci: + autofix_prs: false # disable autofixing PRs + repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 - hooks: - - id: trailing-whitespace - exclude: tests/foreman/data/ - - id: check-yaml - - id: debug-statements -- repo: https://github.com/psf/black - rev: 22.10.0 - hooks: - - id: black -- repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.277 - hooks: - - id: ruff - args: [--fix, --exit-non-zero-on-fix] -- repo: local - hooks: - - id: fix-uuids - name: Robottelo Custom Fix UUIDs script - description: This hook runs the scripts/fix_uuids.sh script - language: script - entry: scripts/fix_uuids.sh - verbose: true - types: [text] - require_serial: true + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + exclude: tests/foreman/data/ + - id: check-yaml + - id: debug-statements + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.4.1 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format + - repo: local + hooks: + - id: fix-uuids + name: Robottelo Custom Fix UUIDs script + description: This hook runs the scripts/fix_uuids.sh script + language: script + entry: scripts/fix_uuids.sh + verbose: true + types: [text] + require_serial: true + - repo: https://github.com/gitleaks/gitleaks + rev: v8.18.0 + hooks: + - id: gitleaks diff --git a/conf/fam.yaml.template b/conf/fam.yaml.template new file mode 100644 index 00000000000..079225d0048 --- /dev/null +++ b/conf/fam.yaml.template @@ -0,0 +1,101 @@ +FAM: + SERVER: + # Parameter for all tests + foreman_username: admin + foreman_password: "changeme" + foreman_server_url: "https://foreman.example.com" + foreman_validate_certs: false + + foreman_proxy: "foreman.example.com" + + # Parameter for snapshot test + snapshot_host_name: "test_host" + + # Parameter for job invocation test + foreman_host: "foreman.example.com" + + # Parameter for subscription_manifest test + subscription_manifest_path: "data/manifest_foreman-ansible-modules.zip" + + # Parameter for redhat_manifest test + manifest_name: "katello.example.com" + rhsm_username: "john-smith" + rhsm_password: "changeme" + rhsm_pool_id: 8a85f99a7db4827d017dc512fcad00b0 + rhsm_validate_certs: false + + # Parameter for scc_product test + scc_account_name_for_scc_product: testaccount + scc_account_login_for_scc_product: testuser + scc_account_password_for_scc_product: testpass + + # Parameter for external_usergroup testing + auth_source_ldap_host: ldap.example.com + auth_source_ldap_account: ansible + auth_source_ldap_account_password: pass + auth_source_ldap_base_dn: dc=example,dc=com + auth_source_ldap_attr_login: uid + auth_source_ldap_groups_base: cn=groups,cn=accounts,dc=example,dc=com + external_usergroup_name: 
"admins" + + COMPUTE_PROFILE: + libvirt: + compute_resource: + name: libvirt-cr01 + organizations: + - Test Organization + locations: + - Test Location + params: + url: qemu+ssh://libvirtuser@localhost/system + compute_profile: + name: app-small + attrs: + cpus: 1 + memory: 2147483648 + nics_attributes: + 0: + type: bridge + bridge: "" + model: virtio + volumes_attributes: + 0: + pool_name: default + capacity: 10G + allocation: 0G + format_type: raw + ovirt: + compute_resource: + name: ovirt-cr01 + organizations: + - Test Organization + locations: + - Test Location + params: + url: "https://ovirt.example.com/ovirt-engine/api" + user: compute-user@internal + password: ovirtcompute123 + use_v4: true + datacenter: c1479626-99a2-44eb-8401-14b5630f417f + ovirt_quota: 502a76bb-a3fe-42f1-aed6-2a7c892a6786 + compute_profile: + name: app-small + attrs: + cluster: Devel-Infra + cores: 2 + sockets: 1 + memory: 1073741824 + ha: 0 + interfaces_attributes: + 0: + name: "" + network: ovirtmgmt + interface: virtio + volumes_attributes: + 0: + size_gb: '16' + storage_domain: 'Local-IBM' + preallocate: '1' + wipe_after_delete: '0' + interface: 'virtio_scsi' + bootable: 'true' diff --git a/conf/jira.yaml.template b/conf/jira.yaml.template new file mode 100644 index 00000000000..e76ac35f157 --- /dev/null +++ b/conf/jira.yaml.template @@ -0,0 +1,5 @@ +JIRA: + # url default value is set to 'https://issues.redhat.com' even if not provided. + URL: https://issues.redhat.com + # Provide api_key to access Jira REST API + API_KEY: replace-with-jira-api-key diff --git a/conf/oscap.yaml.template b/conf/oscap.yaml.template index bfeec7103fb..add2477848e 100644 --- a/conf/oscap.yaml.template +++ b/conf/oscap.yaml.template @@ -1,2 +1,6 @@ OSCAP: - CONTENT_PATH: /usr/share/xml/scap/ssg/content/ssg-rhel7-ds.xml + RHEL_MAJOR_VERSION: "@jinja {{this.server.version.rhel_version | int }}" + CONTENT_PATH: '@format /usr/share/xml/scap/ssg/content/ssg-rhel{this.oscap.rhel_major_version}-ds.xml' + # see: robottelo/constants/__init__.py OSCAP_PROFILE + PROFILE: '@format security{this.oscap.rhel_major_version}' + CONTENT_DIR: /usr/share/xml/scap/ssg/content diff --git a/conf/provisioning.yaml.template b/conf/provisioning.yaml.template index 308686cf84e..d2e547d91be 100644 --- a/conf/provisioning.yaml.template +++ b/conf/provisioning.yaml.template @@ -3,3 +3,5 @@ PROVISIONING: HOST_ROOT_PASSWORD: HOST_SSH_KEY_PRIV: HOST_SSH_KEY_PUB: + PROVISIONING_SAT_WORKFLOW: + PROVISIONING_HOST_WORKFLOW: diff --git a/conftest.py b/conftest.py index f54b9cce479..c81b56a0c93 100644 --- a/conftest.py +++ b/conftest.py @@ -1,4 +1,5 @@ """Global Configurations for py.test runner""" + import pytest pytest_plugins = [ diff --git a/docs/conf.py b/docs/conf.py index b143f7ac6fe..02ef4d4298a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -4,6 +4,7 @@ http://sphinx-doc.org/config.html """ + import builtins import os import sys diff --git a/pyproject.toml b/pyproject.toml index 72a8e2b8f3a..ff201b018ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,22 +1,14 @@ -[tool.black] -line-length = 100 -skip-string-normalization = true -include = '\.pyi?$' -exclude = ''' -/( - \.git - | \.hg - | \.mypy_cache - | \.venv - | _build - | buck-out - | build - | dist -)/ -''' - [tool.ruff] target-version = "py311" +# Allow lines to be as long as 100. 
+line-length = 100 +exclude = [".git", ".hg", ".mypy_cache", ".venv", "_build", "buck-out", "build", "dist"] + +[tool.ruff.format] +# Preserve quotes +quote-style = "preserve" # TODO: change to "single" when flake8-quotes is enabled + +[tool.ruff.lint] fixable = ["ALL"] select = [ @@ -40,21 +32,21 @@ ignore = [ "PT005", # pytest no underscrore prefix for return fixtures ] -[tool.ruff.isort] +[tool.ruff.lint.isort] force-sort-within-sections = true known-first-party = [ "robottelo", ] combine-as-imports = true -[tool.ruff.flake8-pytest-style] +[tool.ruff.lint.flake8-pytest-style] fixture-parentheses = false mark-parentheses = false -[tool.ruff.flake8-quotes] +[tool.ruff.lint.flake8-quotes] inline-quotes = "single" -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] max-complexity = 20 [tool.pytest.ini_options] diff --git a/pytest_fixtures/component/activationkey.py b/pytest_fixtures/component/activationkey.py index e2a68959d29..0be96945b99 100644 --- a/pytest_fixtures/component/activationkey.py +++ b/pytest_fixtures/component/activationkey.py @@ -8,7 +8,7 @@ @pytest.fixture(scope='module') def module_activation_key(module_entitlement_manifest_org, module_target_sat): """Create activation key using default CV and library environment.""" - module_target_sat.api.ActivationKey( + return module_target_sat.api.ActivationKey( content_view=module_entitlement_manifest_org.default_content_view.id, environment=module_entitlement_manifest_org.library.id, organization=module_entitlement_manifest_org, diff --git a/pytest_fixtures/component/http_proxy.py b/pytest_fixtures/component/http_proxy.py index 8c6095092dd..a98e7409699 100644 --- a/pytest_fixtures/component/http_proxy.py +++ b/pytest_fixtures/component/http_proxy.py @@ -1,6 +1,13 @@ import pytest from robottelo.config import settings +from robottelo.hosts import ProxyHost + + +@pytest.fixture(scope='session') +def session_auth_proxy(session_target_sat): + """Instantiates authenticated HTTP proxy as a session-scoped fixture""" + return ProxyHost(settings.http_proxy.auth_proxy_url) @pytest.fixture diff --git a/pytest_fixtures/component/oscap.py b/pytest_fixtures/component/oscap.py index e8a7d230603..07dfd7a51e6 100644 --- a/pytest_fixtures/component/oscap.py +++ b/pytest_fixtures/component/oscap.py @@ -5,7 +5,7 @@ import pytest from robottelo.config import robottelo_tmp_dir, settings -from robottelo.constants import OSCAP_PROFILE, OSCAP_TAILORING_FILE, DataFile +from robottelo.constants import OSCAP_TAILORING_FILE, DataFile @pytest.fixture(scope="session") @@ -35,11 +35,10 @@ def scap_content(import_ansible_roles, module_target_sat): ) scap_id = scap_info['id'] scap_info = entities.ScapContents(id=scap_id).read() - scap_profile_id = [ profile['id'] for profile in scap_info.scap_content_profiles - if OSCAP_PROFILE['security7'] in profile['title'] + if module_target_sat.OSCAP['dsrhel'] in profile['title'] ][0] return { "title": title, diff --git a/pytest_fixtures/component/provision_capsule_pxe.py b/pytest_fixtures/component/provision_capsule_pxe.py index 4360ba6e4bd..dc8dd48471f 100644 --- a/pytest_fixtures/component/provision_capsule_pxe.py +++ b/pytest_fixtures/component/provision_capsule_pxe.py @@ -35,7 +35,7 @@ def capsule_provisioning_sat( sat = module_target_sat provisioning_domain_name = f"{gen_string('alpha').lower()}.foo" broker_data_out = Broker().execute( - workflow='configure-install-sat-provisioning-rhv', + workflow=settings.provisioning.provisioning_sat_workflow, artifacts='last', target_vlan_id=settings.provisioning.vlan_id, 
target_host=module_capsule_configured.name, diff --git a/pytest_fixtures/component/provision_pxe.py b/pytest_fixtures/component/provision_pxe.py index beef717d94d..18c98242fc7 100644 --- a/pytest_fixtures/component/provision_pxe.py +++ b/pytest_fixtures/component/provision_pxe.py @@ -152,7 +152,7 @@ def module_provisioning_sat( provisioning_domain_name = f"{gen_string('alpha').lower()}.foo" broker_data_out = Broker().execute( - workflow='configure-install-sat-provisioning-rhv', + workflow=settings.provisioning.provisioning_sat_workflow, artifacts='last', target_vlan_id=settings.provisioning.vlan_id, target_host=sat.name, @@ -223,7 +223,7 @@ def provisioning_host(module_ssh_key_file, pxe_loader): "" # TODO: Make this an optional fixture parameter (update vm_firmware when adding this) ) with Broker( - workflow="deploy-configure-pxe-provisioning-host-rhv", + workflow=settings.provisioning.provisioning_host_workflow, host_class=ContentHost, target_vlan_id=vlan_id, target_vm_firmware=pxe_loader.vm_firmware, @@ -245,7 +245,7 @@ def provision_multiple_hosts(module_ssh_key_file, pxe_loader, request): "" # TODO: Make this an optional fixture parameter (update vm_firmware when adding this) ) with Broker( - workflow="deploy-configure-pxe-provisioning-host-rhv", + workflow=settings.provisioning.provisioning_host_workflow, host_class=ContentHost, _count=getattr(request, 'param', 2), target_vlan_id=vlan_id, diff --git a/pytest_fixtures/component/provision_vmware.py b/pytest_fixtures/component/provision_vmware.py index 385f49bae38..0fb9f84ff38 100644 --- a/pytest_fixtures/component/provision_vmware.py +++ b/pytest_fixtures/component/provision_vmware.py @@ -1,5 +1,6 @@ from fauxfactory import gen_string import pytest +from wrapanapi import VMWareSystem from robottelo.config import settings @@ -13,6 +14,17 @@ def vmware(request): return versions[getattr(request, 'param', 'vmware8')] +@pytest.fixture +def vmwareclient(vmware): + vmwareclient = VMWareSystem( + hostname=vmware.hostname, + username=settings.vmware.username, + password=settings.vmware.password, + ) + yield vmwareclient + vmwareclient.disconnect() + + @pytest.fixture(scope='module') def module_vmware_cr(module_provisioning_sat, module_sca_manifest_org, module_location, vmware): return module_provisioning_sat.sat.api.VMWareComputeResource( diff --git a/pytest_fixtures/component/repository.py b/pytest_fixtures/component/repository.py index b7a26560980..ce49a1d7f88 100644 --- a/pytest_fixtures/component/repository.py +++ b/pytest_fixtures/component/repository.py @@ -245,3 +245,25 @@ def module_repos_collection_with_manifest( ) _repos_collection.setup_content(module_entitlement_manifest_org.id, module_lce.id) return _repos_collection + + +@pytest.fixture +def function_repos_collection_with_manifest( + request, target_sat, function_sca_manifest_org, function_lce +): + """This fixture and its usage is very similar to repos_collection fixture above with extra + setup_content and uploaded manifest capabilities using function_lce and + function_sca_manifest_org fixtures + """ + repos = getattr(request, 'param', []) + repo_distro, repos = _simplify_repos(request, repos) + _repos_collection = target_sat.cli_factory.RepositoryCollection( + distro=repo_distro, + repositories=[ + getattr(target_sat.cli_factory, repo_name)(**repo_params) + for repo in repos + for repo_name, repo_params in repo.items() + ], + ) + _repos_collection.setup_content(function_sca_manifest_org.id, function_lce.id) + return _repos_collection diff --git 
a/pytest_fixtures/component/taxonomy.py b/pytest_fixtures/component/taxonomy.py index e6ac87357cd..c7f53011a84 100644 --- a/pytest_fixtures/component/taxonomy.py +++ b/pytest_fixtures/component/taxonomy.py @@ -23,14 +23,14 @@ def default_location(session_target_sat): def current_sat_org(target_sat): """Return the current organization assigned to the Satellite host""" sat_host = target_sat.api.Host().search(query={'search': f'name={target_sat.hostname}'})[0] - return sat_host.organization.read().name + return sat_host.organization.read() @pytest.fixture def current_sat_location(target_sat): """Return the current location assigned to the Satellite host""" sat_host = target_sat.api.Host().search(query={'search': f'name={target_sat.hostname}'})[0] - return sat_host.location.read().name + return sat_host.location.read() @pytest.fixture diff --git a/pytest_fixtures/component/virtwho_config.py b/pytest_fixtures/component/virtwho_config.py index b8970232136..d46b1153490 100644 --- a/pytest_fixtures/component/virtwho_config.py +++ b/pytest_fixtures/component/virtwho_config.py @@ -24,11 +24,7 @@ def org_module(request, default_org, module_sca_manifest_org): @pytest.fixture def org_session(request, session, session_sca): - if 'sca' in request.module.__name__.split('.')[-1]: - org_session = session_sca - else: - org_session = session - return org_session + return session_sca if 'sca' in request.module.__name__.split('.')[-1] else session @pytest.fixture diff --git a/pytest_fixtures/core/contenthosts.py b/pytest_fixtures/core/contenthosts.py index 485591ed7d9..89dcbb2dfa8 100644 --- a/pytest_fixtures/core/contenthosts.py +++ b/pytest_fixtures/core/contenthosts.py @@ -4,6 +4,7 @@ The functions in this module are read in the pytest_plugins/fixture_markers.py module All functions in this module will be treated as fixtures that apply the contenthost mark """ + from broker import Broker import pytest @@ -45,6 +46,15 @@ def rhel_contenthost(request): yield host +@pytest.fixture(scope='module') +def module_rhel_contenthost(request): + """A module-level fixture that provides a parametrized content host object""" + # Request should be parametrized through pytest_fixtures.fixture_markers + # unpack params dict + with Broker(**host_conf(request), host_class=ContentHost) as host: + yield host + + @pytest.fixture(params=[{'rhel_version': '7'}]) def rhel7_contenthost(request): """A function-level fixture that provides a rhel7 content host object""" @@ -161,6 +171,16 @@ def rex_contenthost(request, module_org, target_sat, module_ak_with_cv): yield host +@pytest.fixture +def rex_contenthosts(request, module_org, target_sat, module_ak_with_cv): + request.param['no_containers'] = True + with Broker(**host_conf(request), host_class=ContentHost, _count=2) as hosts: + for host in hosts: + repo = settings.repos['SATCLIENT_REPO'][f'RHEL{host.os_version.major}'] + host.register(module_org, None, module_ak_with_cv.name, target_sat, repo=repo) + yield hosts + + @pytest.fixture def katello_host_tools_tracer_host(rex_contenthost, target_sat): """Install katello-host-tools-tracer, create custom @@ -268,8 +288,10 @@ def sat_upgrade_chost(): def custom_host(request): """A rhel content host that passes custom host config through request.param""" deploy_args = request.param - # if 'deploy_rhel_version' is not set, let's default to RHEL 8 - deploy_args['deploy_rhel_version'] = deploy_args.get('deploy_rhel_version', '8') + # if 'deploy_rhel_version' is not set, let's default to what's in content_host.yaml + 
deploy_args['deploy_rhel_version'] = deploy_args.get( + 'deploy_rhel_version', settings.content_host.default_rhel_version + ) deploy_args['workflow'] = 'deploy-rhel' with Broker(**deploy_args, host_class=Satellite) as host: yield host diff --git a/pytest_fixtures/core/xdist.py b/pytest_fixtures/core/xdist.py index 4d02fe026d0..855cd2b1cfd 100644 --- a/pytest_fixtures/core/xdist.py +++ b/pytest_fixtures/core/xdist.py @@ -1,4 +1,5 @@ """Fixtures specific to or relating to pytest's xdist plugin""" + import random from broker import Broker @@ -24,16 +25,21 @@ def align_to_satellite(request, worker_id, satellite_factory): settings.set("server.hostname", None) on_demand_sat = None - if worker_id in ['master', 'local']: - worker_pos = 0 - else: - worker_pos = int(worker_id.replace('gw', '')) + worker_pos = 0 if worker_id in ["master", "local"] else int(worker_id.replace("gw", "")) # attempt to add potential satellites from the broker inventory file if settings.server.inventory_filter: + logger.info( + f'{worker_id=}: Attempting to add Satellite hosts using inventory filter: ' + f'{settings.server.inventory_filter}' + ) hosts = Satellite.get_hosts_from_inventory(filter=settings.server.inventory_filter) settings.server.hostnames += [host.hostname for host in hosts] + logger.debug( + f'{worker_id=}: {settings.server.xdist_behavior=}, ' + f'{settings.server.hostnames=}, {settings.server.auto_checkin=}' + ) # attempt to align a worker to a satellite if settings.server.xdist_behavior == 'run-on-one' and settings.server.hostnames: settings.set("server.hostname", settings.server.hostnames[0]) @@ -48,14 +54,19 @@ def align_to_satellite(request, worker_id, satellite_factory): settings.set("server.hostname", on_demand_sat.hostname) # if no satellite was received, fallback to balance if not settings.server.hostname: + logger.info( + f'{worker_id=}: No Satellite hostnames were available, ' + 'falling back to balance behavior' + ) settings.set("server.hostname", random.choice(settings.server.hostnames)) if settings.server.hostname: - logger.info( - f'xdist worker {worker_id} was assigned hostname {settings.server.hostname}' - ) + logger.info(f'{worker_id=}: Worker was assigned hostname {settings.server.hostname}') configure_airgun() configure_nailgun() yield if on_demand_sat and settings.server.auto_checkin: + logger.info( + f'{worker_id=}: Checking in on-demand Satellite ' f'{on_demand_sat.hostname}' + ) on_demand_sat.teardown() Broker(hosts=[on_demand_sat]).checkin() diff --git a/pytest_plugins/capsule_n-minus.py b/pytest_plugins/capsule_n-minus.py index f903e239757..9a7edf76b50 100644 --- a/pytest_plugins/capsule_n-minus.py +++ b/pytest_plugins/capsule_n-minus.py @@ -19,7 +19,6 @@ def pytest_addoption(parser): def pytest_collection_modifyitems(items, config): - if not config.getoption('n_minus', False): return diff --git a/pytest_plugins/fixture_markers.py b/pytest_plugins/fixture_markers.py index 6fe2f4df4af..795397bec5d 100644 --- a/pytest_plugins/fixture_markers.py +++ b/pytest_plugins/fixture_markers.py @@ -5,10 +5,12 @@ TARGET_FIXTURES = [ 'rhel_contenthost', + 'module_rhel_contenthost', 'content_hosts', 'module_provisioning_rhel_content', 'capsule_provisioning_rhel_content', 'rex_contenthost', + 'rex_contenthosts', ] diff --git a/pytest_plugins/markers.py b/pytest_plugins/markers.py index b7e0f8f6346..e5d9855a179 100644 --- a/pytest_plugins/markers.py +++ b/pytest_plugins/markers.py @@ -25,6 +25,7 @@ def pytest_configure(config): "include_capsule: For satellite-maintain tests to run on Satellite and 
Capsule both", "capsule_only: For satellite-maintain tests to run only on Capsules", "manifester: Tests that require manifester", + "ldap: Tests related to ldap authentication", ] markers.extend(module_markers()) for marker in markers: diff --git a/pytest_plugins/metadata_markers.py b/pytest_plugins/metadata_markers.py index 57b12aa5c1f..59b1e6c9e56 100644 --- a/pytest_plugins/metadata_markers.py +++ b/pytest_plugins/metadata_markers.py @@ -7,9 +7,22 @@ from robottelo.config import settings from robottelo.hosts import get_sat_rhel_version from robottelo.logging import collection_logger as logger +from robottelo.utils.issue_handlers.jira import are_any_jira_open FMT_XUNIT_TIME = '%Y-%m-%dT%H:%M:%S' IMPORTANCE_LEVELS = [] +selected = [] +deselected = [] + + +def parse_comma_separated_list(option_value): + if isinstance(option_value, str): + if option_value.lower() == 'true': + return True + if option_value.lower() == 'false': + return False + return [item.strip() for item in option_value.split(',')] + return None def pytest_addoption(parser): @@ -26,6 +39,25 @@ def pytest_addoption(parser): '--team', help='Comma separated list of teams to include in test collection', ) + parser.addoption( + '--blocked-by', + type=parse_comma_separated_list, + nargs='?', + const=True, + default=True, + help='Comma separated list of Jiras to collect tests matching BlockedBy testimony marker. ' + 'If no issue is provided all the tests with BlockedBy testimony marker will be processed ' + 'and deselected if any issue is open.', + ) + parser.addoption( + '--verifies-issues', + type=parse_comma_separated_list, + nargs='?', + const=True, + default=False, + help='Comma separated list of Jiras to collect tests matching Verifies testimony marker. ' + 'If no issue is provided all the tests with Verifies testimony marker will be selected.', + ) def pytest_configure(config): @@ -34,6 +66,8 @@ def pytest_configure(config): 'importance: CaseImportance testimony token, use --importance to filter', 'component: Component testimony token, use --component to filter', 'team: Team testimony token, use --team to filter', + 'blocked_by: BlockedBy testimony token, use --blocked-by to filter', + 'verifies_issues: Verifies testimony token, use --verifies_issues to filter', ]: config.addinivalue_line("markers", marker) @@ -56,6 +90,57 @@ re.IGNORECASE, ) +blocked_by_regex = re.compile( + # To match :BlockedBy: SAT-32932 + r'\s*:BlockedBy:\s*(?P<blocked_by>.*\S*)', + re.IGNORECASE, +) + +verifies_regex = re.compile( + # To match :Verifies: SAT-32932 + r'\s*:Verifies:\s*(?P<verifies>.*\S*)', + re.IGNORECASE, +) + + +def handle_verification_issues(item, verifies_marker, verifies_issues): + """Handles the logic for deselecting tests based on Verifies testimony token + and --verifies-issues pytest option. + """ + if verifies_issues: + if not verifies_marker: + log_and_deselect(item, '--verifies-issues') + return False + if isinstance(verifies_issues, list): + verifies_args = verifies_marker.args[0] + if all(issue not in verifies_issues for issue in verifies_args): + log_and_deselect(item, '--verifies-issues') + return False + return True + + +def handle_blocked_by(item, blocked_by_marker, blocked_by): + """Handles the logic for deselecting tests based on BlockedBy testimony token + and --blocked-by pytest option. 
+ """ + if isinstance(blocked_by, list): + if not blocked_by_marker: + log_and_deselect(item, '--blocked-by') + return False + if all(issue not in blocked_by for issue in blocked_by_marker.args[0]): + log_and_deselect(item, '--blocked-by') + return False + elif isinstance(blocked_by, bool) and blocked_by_marker: + if blocked_by and are_any_jira_open(blocked_by_marker.args[0]): + log_and_deselect(item, '--blocked-by') + return False + return True + + +def log_and_deselect(item, option): + logger.debug(f'Deselected test {item.nodeid} due to "{option}" pytest option.') + deselected.append(item) + @pytest.hookimpl(tryfirst=True) def pytest_collection_modifyitems(items, config): @@ -81,9 +166,8 @@ def pytest_collection_modifyitems(items, config): importance = [i for i in (config.getoption('importance') or '').split(',') if i != ''] component = [c for c in (config.getoption('component') or '').split(',') if c != ''] team = [a.lower() for a in (config.getoption('team') or '').split(',') if a != ''] - - selected = [] - deselected = [] + verifies_issues = config.getoption('verifies_issues') + blocked_by = config.getoption('blocked_by') logger.info('Processing test items to add testimony token markers') for item in items: item.user_properties.append( @@ -100,6 +184,8 @@ def pytest_collection_modifyitems(items, config): for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module)) if d is not None ] + blocked_by_marks_to_add = [] + verifies_marks_to_add = [] for docstring in item_docstrings: item_mark_names = [m.name for m in item.iter_markers()] # Add marker starting at smallest docstring scope @@ -113,6 +199,18 @@ def pytest_collection_modifyitems(items, config): doc_team = team_regex.findall(docstring) if doc_team and 'team' not in item_mark_names: item.add_marker(pytest.mark.team(doc_team[0].lower())) + doc_verifies = verifies_regex.findall(docstring) + if doc_verifies and 'verifies_issues' not in item_mark_names: + verifies_marks_to_add.extend(str(b.strip()) for b in doc_verifies[-1].split(',')) + doc_blocked_by = blocked_by_regex.findall(docstring) + if doc_blocked_by and 'blocked_by' not in item_mark_names: + blocked_by_marks_to_add.extend( + str(b.strip()) for b in doc_blocked_by[-1].split(',') + ) + if blocked_by_marks_to_add: + item.add_marker(pytest.mark.blocked_by(blocked_by_marks_to_add)) + if verifies_marks_to_add: + item.add_marker(pytest.mark.verifies_issues(verifies_marks_to_add)) # add markers as user_properties so they are recorded in XML properties of the report # pytest-ibutsu will include user_properties dict in testresult metadata @@ -169,7 +267,16 @@ def pytest_collection_modifyitems(items, config): deselected.append(item) continue - selected.append(item) + if verifies_issues or blocked_by: + # Filter tests based on --verifies-issues and --blocked-by pytest options + # and Verifies and BlockedBy testimony tokens. 
+ verifies_marker = item.get_closest_marker('verifies_issues', False) + blocked_by_marker = item.get_closest_marker('blocked_by', False) + if not handle_verification_issues(item, verifies_marker, verifies_issues): + continue + if not handle_blocked_by(item, blocked_by_marker, blocked_by): + continue + selected.append(item) # selected will be empty if no filter option was passed, defaulting to full items list items[:] = selected if deselected else items diff --git a/pytest_plugins/requirements/req_updater.py b/pytest_plugins/requirements/req_updater.py index 664a9bb92a5..37d1aabc573 100644 --- a/pytest_plugins/requirements/req_updater.py +++ b/pytest_plugins/requirements/req_updater.py @@ -3,7 +3,6 @@ class ReqUpdater: - # Installed package name as key and its counterpart in requirements file as value package_deviates = { 'Betelgeuse': 'betelgeuse', diff --git a/pytest_plugins/requirements/update_requirements.py b/pytest_plugins/requirements/update_requirements.py index e2ad8840185..74494427ce3 100644 --- a/pytest_plugins/requirements/update_requirements.py +++ b/pytest_plugins/requirements/update_requirements.py @@ -1,4 +1,5 @@ """Plugin enables pytest to notify and update the requirements""" + from .req_updater import ReqUpdater updater = ReqUpdater() diff --git a/pytest_plugins/sanity_plugin.py b/pytest_plugins/sanity_plugin.py index 1d93a4b45f3..14d89b87d54 100644 --- a/pytest_plugins/sanity_plugin.py +++ b/pytest_plugins/sanity_plugin.py @@ -1,4 +1,4 @@ -""" A sanity testing plugin to assist in executing robottelo tests as sanity tests smartly +"""A sanity testing plugin to assist in executing robottelo tests as sanity tests smartly 1. Make installer test to run first which should set the hostname and all other tests then should run after that diff --git a/requirements-optional.txt b/requirements-optional.txt index e776154c7a1..f44e82e6f04 100644 --- a/requirements-optional.txt +++ b/requirements-optional.txt @@ -1,11 +1,11 @@ # For running tests and checking code quality using these modules. flake8==7.0.0 -pytest-cov==4.1.0 -redis==5.0.3 -pre-commit==3.6.2 +pytest-cov==5.0.0 +redis==5.0.4 +pre-commit==3.7.0 # For generating documentation. 
-sphinx==7.2.6 +sphinx==7.3.6 sphinx-autoapi==3.0.0 # For 'manage' interactive shell diff --git a/requirements.txt b/requirements.txt index 4826ba6ecb6..9e5c79833d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,39 +1,33 @@ # Version updates managed by dependabot betelgeuse==1.11.0 -# broker[docker]==0.4.1 - Temporarily disabled, see below -cryptography==42.0.5 -deepdiff==6.7.1 -docker==7.0.0 # Temporary until Broker is back on PyPi +broker[docker]==0.4.9 +cryptography==42.0.7 +deepdiff==7.0.1 dynaconf[vault]==3.2.5 -fauxfactory==3.1.0 -jinja2==3.1.3 +fauxfactory==3.1.1 +jinja2==3.1.4 manifester==0.0.14 navmazing==1.2.2 -paramiko==3.4.0 # Temporary until Broker is back on PyPi productmd==1.38 pyotp==2.9.0 python-box==7.1.1 -pytest==8.0.2 +pytest==8.2.0 +pytest-order==1.2.1 pytest-services==2.2.1 -pytest-mock==3.12.0 -pytest-reportportal==5.4.0 +pytest-mock==3.14.0 +pytest-reportportal==5.4.1 pytest-xdist==3.5.0 pytest-fixturecollection==0.1.2 pytest-ibutsu==2.2.4 PyYAML==6.0.1 requests==2.31.0 tenacity==8.2.3 -testimony==2.3.0 +testimony==2.4.0 wait-for==1.2.0 wrapanapi==3.6.0 # Get airgun, nailgun and upgrade from 6.14.z -git+https://github.com/SatelliteQE/airgun.git@6.14.z#egg=airgun -git+https://github.com/SatelliteQE/nailgun.git@6.14.z#egg=nailgun -# Broker currently is unable to push to PyPi due to [1] and [2] -# In the meantime, we install directly from the repo -# [1] - https://github.com/ParallelSSH/ssh2-python/issues/193 -# [2] - https://github.com/pypi/warehouse/issues/7136 -git+https://github.com/SatelliteQE/broker.git@0.4.7#egg=broker +airgun @ git+https://github.com/SatelliteQE/airgun.git@6.14.z#egg=airgun +nailgun @ git+https://github.com/SatelliteQE/nailgun.git@6.14.z#egg=nailgun --editable . diff --git a/robottelo/cli/acs.py b/robottelo/cli/acs.py index 0368bc8d8d8..f610a5e983a 100644 --- a/robottelo/cli/acs.py +++ b/robottelo/cli/acs.py @@ -21,6 +21,7 @@ update Update an alternate content source. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/activationkey.py b/robottelo/cli/activationkey.py index 415a682c444..8a58b2f7bb2 100644 --- a/robottelo/cli/activationkey.py +++ b/robottelo/cli/activationkey.py @@ -25,6 +25,7 @@ subscriptions List associated subscriptions update Update an activation key """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/admin.py b/robottelo/cli/admin.py index ccae4bf47db..e6a32296400 100644 --- a/robottelo/cli/admin.py +++ b/robottelo/cli/admin.py @@ -12,6 +12,7 @@ Options: -h, --help Print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/ansible.py b/robottelo/cli/ansible.py index c9d42dbcb30..d717f8cd4d6 100644 --- a/robottelo/cli/ansible.py +++ b/robottelo/cli/ansible.py @@ -8,6 +8,7 @@ roles Manage ansible roles variables Manage ansible variables """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/architecture.py b/robottelo/cli/architecture.py index 02a9a173cf4..6bad9bf55c4 100644 --- a/robottelo/cli/architecture.py +++ b/robottelo/cli/architecture.py @@ -18,6 +18,7 @@ remove_operatingsystem Disassociate a resource update Update an architecture. 
""" + from robottelo.cli.base import Base diff --git a/robottelo/cli/arfreport.py b/robottelo/cli/arfreport.py index 05e04c0d7c3..a478a0ecfd7 100644 --- a/robottelo/cli/arfreport.py +++ b/robottelo/cli/arfreport.py @@ -16,6 +16,7 @@ list List ARF reports """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/auth.py b/robottelo/cli/auth.py index a6995ce7361..c50cf08fb60 100644 --- a/robottelo/cli/auth.py +++ b/robottelo/cli/auth.py @@ -11,6 +11,7 @@ logout Wipe your credentials status Information about current connections """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/base.py b/robottelo/cli/base.py index 2a3ad35ff4f..2827fdba5e5 100644 --- a/robottelo/cli/base.py +++ b/robottelo/cli/base.py @@ -1,4 +1,5 @@ """Generic base class for cli hammer commands.""" + import re from wait_for import wait_for @@ -132,24 +133,24 @@ def delete(cls, options=None, timeout=None): return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout) @classmethod - def delete_parameter(cls, options=None): + def delete_parameter(cls, options=None, timeout=None): """ Deletes parameter from record. """ cls.command_sub = 'delete-parameter' - return cls.execute(cls._construct_command(options)) + return cls.execute(cls._construct_command(options), ignore_stderr=False, timeout=timeout) @classmethod - def dump(cls, options=None): + def dump(cls, options=None, timeout=None): """ Displays the content for existing partition table. """ cls.command_sub = 'dump' - return cls.execute(cls._construct_command(options)) + return cls.execute(cls._construct_command(options), ignore_stderr=False, timeout=timeout) @classmethod def _get_username_password(cls, username=None, password=None): diff --git a/robottelo/cli/bootdisk.py b/robottelo/cli/bootdisk.py index 50df399f7f3..f50c817cc57 100644 --- a/robottelo/cli/bootdisk.py +++ b/robottelo/cli/bootdisk.py @@ -14,6 +14,7 @@ host Download host image subnet Download subnet generic image """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/capsule.py b/robottelo/cli/capsule.py index 1e4e4a20b36..07ec8698f8e 100644 --- a/robottelo/cli/capsule.py +++ b/robottelo/cli/capsule.py @@ -19,6 +19,7 @@ refresh-features Refresh capsule features update Update a capsule """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/computeprofile.py b/robottelo/cli/computeprofile.py index b288e9c34c6..2856138f03f 100644 --- a/robottelo/cli/computeprofile.py +++ b/robottelo/cli/computeprofile.py @@ -18,6 +18,7 @@ -h, --help Print help Update a compute resource. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/computeresource.py b/robottelo/cli/computeresource.py index 1026d163787..48823ef1690 100644 --- a/robottelo/cli/computeresource.py +++ b/robottelo/cli/computeresource.py @@ -17,6 +17,7 @@ list List all compute resources. update Update a compute resource. 
""" + from robottelo.cli.base import Base diff --git a/robottelo/cli/content_credentials.py b/robottelo/cli/content_credentials.py index b7582dadd48..6844ef0083f 100644 --- a/robottelo/cli/content_credentials.py +++ b/robottelo/cli/content_credentials.py @@ -16,6 +16,7 @@ list List content credentials update Update a content credential """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/content_export.py b/robottelo/cli/content_export.py index 8d67ddfc032..c868e8483a4 100644 --- a/robottelo/cli/content_export.py +++ b/robottelo/cli/content_export.py @@ -19,6 +19,7 @@ list View content view export histories """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/content_import.py b/robottelo/cli/content_import.py index 18669dd49f8..d0910433dcb 100644 --- a/robottelo/cli/content_import.py +++ b/robottelo/cli/content_import.py @@ -16,6 +16,7 @@ version Imports a content archive to a content view version """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/contentview.py b/robottelo/cli/contentview.py index fb8001f89d7..bac4dee01a9 100644 --- a/robottelo/cli/contentview.py +++ b/robottelo/cli/contentview.py @@ -33,6 +33,7 @@ -h, --help print help """ + from robottelo.cli import hammer from robottelo.cli.base import Base, CLIError diff --git a/robottelo/cli/defaults.py b/robottelo/cli/defaults.py index 7d624930f95..0c7524a65cc 100644 --- a/robottelo/cli/defaults.py +++ b/robottelo/cli/defaults.py @@ -15,6 +15,7 @@ list List all the default parameters providers List all the providers """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/discoveredhost.py b/robottelo/cli/discoveredhost.py index 6404dfc89ec..99c1edffd6b 100644 --- a/robottelo/cli/discoveredhost.py +++ b/robottelo/cli/discoveredhost.py @@ -19,6 +19,7 @@ reboot Reboot a host refresh-facts Refresh the facts of a host """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/discoveryrule.py b/robottelo/cli/discoveryrule.py index 54bb5fc872e..9685132ef88 100644 --- a/robottelo/cli/discoveryrule.py +++ b/robottelo/cli/discoveryrule.py @@ -16,6 +16,7 @@ list List all discovery rules update Update a rule """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/docker.py b/robottelo/cli/docker.py index 01fe51cb786..f12eb949e90 100644 --- a/robottelo/cli/docker.py +++ b/robottelo/cli/docker.py @@ -1,4 +1,5 @@ """Docker related hammer commands""" + from robottelo.cli.base import Base diff --git a/robottelo/cli/domain.py b/robottelo/cli/domain.py index bb449177fcd..601aefc2eb1 100644 --- a/robottelo/cli/domain.py +++ b/robottelo/cli/domain.py @@ -18,6 +18,7 @@ set_parameter Create or update parameter for a domain. update Update a domain. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/environment.py b/robottelo/cli/environment.py index f47e2ba6aaf..1c981216e79 100644 --- a/robottelo/cli/environment.py +++ b/robottelo/cli/environment.py @@ -17,6 +17,7 @@ sc-params List all smart class parameters update Update an environment """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/erratum.py b/robottelo/cli/erratum.py index abe3fa57598..762a678398e 100644 --- a/robottelo/cli/erratum.py +++ b/robottelo/cli/erratum.py @@ -13,6 +13,7 @@ info Show an erratum list List errata """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/fact.py b/robottelo/cli/fact.py index 67a6d99d52a..3521e828b36 100644 --- a/robottelo/cli/fact.py +++ b/robottelo/cli/fact.py @@ -12,6 +12,7 @@ list List all fact values. 
""" + from robottelo.cli.base import Base diff --git a/robottelo/cli/file.py b/robottelo/cli/file.py index 3adb1fe1663..643df551b18 100644 --- a/robottelo/cli/file.py +++ b/robottelo/cli/file.py @@ -13,6 +13,7 @@ info Show a file list List files """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/filter.py b/robottelo/cli/filter.py index 395e44e13d6..595e67584a1 100644 --- a/robottelo/cli/filter.py +++ b/robottelo/cli/filter.py @@ -17,6 +17,7 @@ list List all filters update Update a filter """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/globalparam.py b/robottelo/cli/globalparam.py index 8bca35dc3cb..2dc069a062b 100644 --- a/robottelo/cli/globalparam.py +++ b/robottelo/cli/globalparam.py @@ -14,6 +14,7 @@ list List all common parameters. set Set a global parameter. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/gpgkey.py b/robottelo/cli/gpgkey.py index 4158fa04a70..610640e1c9b 100644 --- a/robottelo/cli/gpgkey.py +++ b/robottelo/cli/gpgkey.py @@ -16,6 +16,7 @@ list List GPG Keys update Update a GPG Key """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/hammer.py b/robottelo/cli/hammer.py index b84e6bf2158..47e22cd7af0 100644 --- a/robottelo/cli/hammer.py +++ b/robottelo/cli/hammer.py @@ -1,4 +1,5 @@ """Helpers to interact with hammer command line utility.""" + import csv import json import re diff --git a/robottelo/cli/host.py b/robottelo/cli/host.py index 05f042f9dde..8121900dda0 100644 --- a/robottelo/cli/host.py +++ b/robottelo/cli/host.py @@ -39,6 +39,7 @@ update Update a host """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/host_registration.py b/robottelo/cli/host_registration.py index 8d0adaececd..61abe2a7d56 100644 --- a/robottelo/cli/host_registration.py +++ b/robottelo/cli/host_registration.py @@ -13,6 +13,7 @@ -h, --help Print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/hostcollection.py b/robottelo/cli/hostcollection.py index 839ef48d4f6..16e13d89110 100644 --- a/robottelo/cli/hostcollection.py +++ b/robottelo/cli/hostcollection.py @@ -23,6 +23,7 @@ remove-host Remove hosts from the host collection update Update a host collection """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/hostgroup.py b/robottelo/cli/hostgroup.py index 5be314b1a43..f13f7878901 100644 --- a/robottelo/cli/hostgroup.py +++ b/robottelo/cli/hostgroup.py @@ -22,6 +22,7 @@ set-parameter Create or update parameter for a hostgroup update Update a host group """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/http_proxy.py b/robottelo/cli/http_proxy.py index 08405e8d045..ba6f6b4debd 100644 --- a/robottelo/cli/http_proxy.py +++ b/robottelo/cli/http_proxy.py @@ -16,6 +16,7 @@ Options: -h, --help Print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/job_invocation.py b/robottelo/cli/job_invocation.py index e1aeacdc84e..62974e3b072 100644 --- a/robottelo/cli/job_invocation.py +++ b/robottelo/cli/job_invocation.py @@ -15,6 +15,7 @@ rerun Rerun the job """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/job_template.py b/robottelo/cli/job_template.py index 7b072cc0612..d33e77b09ea 100644 --- a/robottelo/cli/job_template.py +++ b/robottelo/cli/job_template.py @@ -16,6 +16,7 @@ list List job templates update Update a job template """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/ldapauthsource.py b/robottelo/cli/ldapauthsource.py index fabb6a9c0bd..1d460528f4f 100644 --- 
a/robottelo/cli/ldapauthsource.py +++ b/robottelo/cli/ldapauthsource.py @@ -15,6 +15,7 @@ list List all LDAP authentication sources update Update an LDAP authentication source """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/lifecycleenvironment.py b/robottelo/cli/lifecycleenvironment.py index c22d2268746..b6896d80f59 100644 --- a/robottelo/cli/lifecycleenvironment.py +++ b/robottelo/cli/lifecycleenvironment.py @@ -16,6 +16,7 @@ delete Destroy an environment info Show an environment """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/location.py b/robottelo/cli/location.py index aa3dd234970..82d5789139a 100644 --- a/robottelo/cli/location.py +++ b/robottelo/cli/location.py @@ -36,6 +36,7 @@ remove-user Disassociate an user update Update a location """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/medium.py b/robottelo/cli/medium.py index 8372d96a7cf..6958254b07c 100644 --- a/robottelo/cli/medium.py +++ b/robottelo/cli/medium.py @@ -18,6 +18,7 @@ remove_operatingsystem Disassociate a resource update Update a medium. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/model.py b/robottelo/cli/model.py index 419f2ff62a2..453d5a55d18 100644 --- a/robottelo/cli/model.py +++ b/robottelo/cli/model.py @@ -16,6 +16,7 @@ list List all models. update Update a model. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/module_stream.py b/robottelo/cli/module_stream.py index 1dcb8d9ee54..bc0a248e44f 100644 --- a/robottelo/cli/module_stream.py +++ b/robottelo/cli/module_stream.py @@ -13,6 +13,7 @@ info Show a module-stream list List module-streams """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/operatingsys.py b/robottelo/cli/operatingsys.py index 8bf4411d56d..361259d275e 100644 --- a/robottelo/cli/operatingsys.py +++ b/robottelo/cli/operatingsys.py @@ -27,6 +27,7 @@ operating system. update Update an OS. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/org.py b/robottelo/cli/org.py index 38103aff3a5..21d2c7c053b 100644 --- a/robottelo/cli/org.py +++ b/robottelo/cli/org.py @@ -40,6 +40,7 @@ organization. update Update an organization """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/ostreebranch.py b/robottelo/cli/ostreebranch.py index 5d68869978f..f828299133f 100644 --- a/robottelo/cli/ostreebranch.py +++ b/robottelo/cli/ostreebranch.py @@ -14,6 +14,7 @@ list List ostree_branches """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/package.py b/robottelo/cli/package.py index 75533348ad3..df566b735d9 100644 --- a/robottelo/cli/package.py +++ b/robottelo/cli/package.py @@ -13,6 +13,7 @@ info Show a package list List packages """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/partitiontable.py b/robottelo/cli/partitiontable.py index eb0ebd7fc9e..dc15e25563a 100644 --- a/robottelo/cli/partitiontable.py +++ b/robottelo/cli/partitiontable.py @@ -19,6 +19,7 @@ remove_operatingsystem Disassociate a resource update Update a ptable. 
""" + from robottelo.cli.base import Base diff --git a/robottelo/cli/product.py b/robottelo/cli/product.py index 7a7f03eeff4..90e0a534549 100644 --- a/robottelo/cli/product.py +++ b/robottelo/cli/product.py @@ -20,6 +20,7 @@ update Update a product update-proxy Updates an HTTP Proxy for a product """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/proxy.py b/robottelo/cli/proxy.py index c6887940902..a7722b22935 100644 --- a/robottelo/cli/proxy.py +++ b/robottelo/cli/proxy.py @@ -18,6 +18,7 @@ refresh-features Refresh smart proxy features update Update a smart proxy. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/puppet.py b/robottelo/cli/puppet.py index b340fc75a5e..acc502fafc2 100644 --- a/robottelo/cli/puppet.py +++ b/robottelo/cli/puppet.py @@ -14,6 +14,7 @@ list List all puppetclasses. sc-params List all smart class parameters """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/realm.py b/robottelo/cli/realm.py index bbbcc11b0a9..ce89717fc88 100644 --- a/robottelo/cli/realm.py +++ b/robottelo/cli/realm.py @@ -16,6 +16,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/recurring_logic.py b/robottelo/cli/recurring_logic.py index 71c494c68d7..85c6e7ebba6 100644 --- a/robottelo/cli/recurring_logic.py +++ b/robottelo/cli/recurring_logic.py @@ -11,6 +11,7 @@ info Show recurring logic details list List recurring logics """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/report.py b/robottelo/cli/report.py index 50ca0c1a8b9..e5dadade512 100644 --- a/robottelo/cli/report.py +++ b/robottelo/cli/report.py @@ -15,6 +15,7 @@ list List reports. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/report_template.py b/robottelo/cli/report_template.py index 962c5929bc7..9814525716b 100644 --- a/robottelo/cli/report_template.py +++ b/robottelo/cli/report_template.py @@ -21,6 +21,7 @@ schedule Schedule generating of a report update Update a report template """ + from os import chmod from tempfile import mkstemp diff --git a/robottelo/cli/repository.py b/robottelo/cli/repository.py index 94fa8baa180..084887ae93d 100644 --- a/robottelo/cli/repository.py +++ b/robottelo/cli/repository.py @@ -19,6 +19,7 @@ update Update a repository upload-content Upload content into the repository """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/repository_set.py b/robottelo/cli/repository_set.py index 65dede4e28d..7c2afb6481d 100644 --- a/robottelo/cli/repository_set.py +++ b/robottelo/cli/repository_set.py @@ -19,6 +19,7 @@ info Show a repository list List of repositories """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/role.py b/robottelo/cli/role.py index d83234fc8a0..88eb6a40cd7 100644 --- a/robottelo/cli/role.py +++ b/robottelo/cli/role.py @@ -17,6 +17,7 @@ list List all roles. update Update an role. 
""" + from robottelo.cli.base import Base diff --git a/robottelo/cli/scap_policy.py b/robottelo/cli/scap_policy.py index 7235d2470cf..26479007804 100644 --- a/robottelo/cli/scap_policy.py +++ b/robottelo/cli/scap_policy.py @@ -16,6 +16,7 @@ list List Policies update Update a Policy """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/scap_tailoring_files.py b/robottelo/cli/scap_tailoring_files.py index 0e75af17b46..912c25a639a 100644 --- a/robottelo/cli/scap_tailoring_files.py +++ b/robottelo/cli/scap_tailoring_files.py @@ -17,6 +17,7 @@ list List Tailoring files update Update a Tailoring file """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/scapcontent.py b/robottelo/cli/scapcontent.py index 160da1d0d1e..8e384488d8c 100644 --- a/robottelo/cli/scapcontent.py +++ b/robottelo/cli/scapcontent.py @@ -16,6 +16,7 @@ list List SCAP contents update Update an SCAP content """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/scparams.py b/robottelo/cli/scparams.py index fea0431a5e1..9e661b8f613 100644 --- a/robottelo/cli/scparams.py +++ b/robottelo/cli/scparams.py @@ -18,6 +18,7 @@ variable update Update a smart class parameter """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/settings.py b/robottelo/cli/settings.py index 49777104d4f..13c45776a51 100644 --- a/robottelo/cli/settings.py +++ b/robottelo/cli/settings.py @@ -13,6 +13,7 @@ list List all settings set Update a setting """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/simple_content_access.py b/robottelo/cli/simple_content_access.py index d5e471b824b..6f128a84f17 100644 --- a/robottelo/cli/simple_content_access.py +++ b/robottelo/cli/simple_content_access.py @@ -16,6 +16,7 @@ Simple Content Access enabled """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_advanced.py b/robottelo/cli/sm_advanced.py index 42ba037b811..b6ba5808e00 100644 --- a/robottelo/cli/sm_advanced.py +++ b/robottelo/cli/sm_advanced.py @@ -88,6 +88,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_advanced_by_tag.py b/robottelo/cli/sm_advanced_by_tag.py index afcc1018ab4..f90758e68fc 100644 --- a/robottelo/cli/sm_advanced_by_tag.py +++ b/robottelo/cli/sm_advanced_by_tag.py @@ -17,6 +17,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_backup.py b/robottelo/cli/sm_backup.py index 8da59080151..594c2390785 100644 --- a/robottelo/cli/sm_backup.py +++ b/robottelo/cli/sm_backup.py @@ -14,6 +14,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_health.py b/robottelo/cli/sm_health.py index f8da5ca309c..c023a943892 100644 --- a/robottelo/cli/sm_health.py +++ b/robottelo/cli/sm_health.py @@ -14,6 +14,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_maintenance_mode.py b/robottelo/cli/sm_maintenance_mode.py index 6cbe43e367d..8e9ef09d67f 100644 --- a/robottelo/cli/sm_maintenance_mode.py +++ b/robottelo/cli/sm_maintenance_mode.py @@ -12,6 +12,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_packages.py b/robottelo/cli/sm_packages.py index d4674279172..96aa69531bb 100644 --- a/robottelo/cli/sm_packages.py +++ b/robottelo/cli/sm_packages.py @@ -18,6 +18,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_restore.py 
b/robottelo/cli/sm_restore.py index 0bd68c9fe61..cad914e5192 100644 --- a/robottelo/cli/sm_restore.py +++ b/robottelo/cli/sm_restore.py @@ -12,6 +12,7 @@ -i, --incremental Restore an incremental backup -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_service.py b/robottelo/cli/sm_service.py index c2ec5f32ca7..212d33bda19 100644 --- a/robottelo/cli/sm_service.py +++ b/robottelo/cli/sm_service.py @@ -18,6 +18,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/sm_upgrade.py b/robottelo/cli/sm_upgrade.py index 77712b41593..a30e1582182 100644 --- a/robottelo/cli/sm_upgrade.py +++ b/robottelo/cli/sm_upgrade.py @@ -14,6 +14,7 @@ Options: -h, --help print help """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/srpm.py b/robottelo/cli/srpm.py index 62578bbaa69..689a17c64ef 100644 --- a/robottelo/cli/srpm.py +++ b/robottelo/cli/srpm.py @@ -10,6 +10,7 @@ info Show a SRPM Details list List srpms """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/subnet.py b/robottelo/cli/subnet.py index e0bf4a0d6bd..5fc61ddc561 100644 --- a/robottelo/cli/subnet.py +++ b/robottelo/cli/subnet.py @@ -17,6 +17,7 @@ update Update a subnet """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/subscription.py b/robottelo/cli/subscription.py index 86b57d51d41..916a9068155 100644 --- a/robottelo/cli/subscription.py +++ b/robottelo/cli/subscription.py @@ -18,6 +18,7 @@ upload Upload a subscription manifest """ + from robottelo.cli.base import Base @@ -47,7 +48,7 @@ def refresh_manifest(cls, options=None, timeout=None): return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout) @classmethod - def manifest_history(cls, options=None): + def manifest_history(cls, options=None, timeout=None): """Provided history for subscription manifest""" cls.command_sub = 'manifest-history' - return cls.execute(cls._construct_command(options)) + return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout) diff --git a/robottelo/cli/syncplan.py b/robottelo/cli/syncplan.py index 151a72acb84..2966a349fc4 100644 --- a/robottelo/cli/syncplan.py +++ b/robottelo/cli/syncplan.py @@ -16,6 +16,7 @@ list List sync plans update """ + from robottelo.cli.base import Base from robottelo.exceptions import CLIError diff --git a/robottelo/cli/task.py b/robottelo/cli/task.py index b2a25142407..79a19c3034c 100644 --- a/robottelo/cli/task.py +++ b/robottelo/cli/task.py @@ -14,6 +14,7 @@ progress Show the progress of the task resume Resume all tasks paused in error state """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/template.py b/robottelo/cli/template.py index 3729fc4bf76..e2836346a9c 100644 --- a/robottelo/cli/template.py +++ b/robottelo/cli/template.py @@ -20,6 +20,7 @@ remove-operatingsystem Disassociate an operating system update Update a provisioning template """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/template_input.py b/robottelo/cli/template_input.py index cfe548384c3..d8cf782f783 100644 --- a/robottelo/cli/template_input.py +++ b/robottelo/cli/template_input.py @@ -15,6 +15,7 @@ info Show template input details list List template inputs """ + from robottelo.cli.base import Base, CLIError diff --git a/robottelo/cli/template_sync.py b/robottelo/cli/template_sync.py index 72ee70fa00f..e3da9dbf20c 100644 --- a/robottelo/cli/template_sync.py +++ b/robottelo/cli/template_sync.py @@ -39,6 +39,7 @@ prefix The 
string all imported templates should begin with. repo Override the default repo from settings. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/user.py b/robottelo/cli/user.py index e624a1b5ad4..4a2e6da34d6 100644 --- a/robottelo/cli/user.py +++ b/robottelo/cli/user.py @@ -20,6 +20,7 @@ ssh-keys Managing User SSH Keys. update Update an user. """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/usergroup.py b/robottelo/cli/usergroup.py index cd40ac96bc5..34d57fcc12a 100644 --- a/robottelo/cli/usergroup.py +++ b/robottelo/cli/usergroup.py @@ -20,6 +20,7 @@ remove-user-group Disassociate an user group update Update a user group """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/virt_who_config.py b/robottelo/cli/virt_who_config.py index 4cb6dd1223b..7e2e2e05b6b 100644 --- a/robottelo/cli/virt_who_config.py +++ b/robottelo/cli/virt_who_config.py @@ -19,6 +19,7 @@ list List of virt-who configurations update Update a virt-who configuration """ + from robottelo.cli.base import Base diff --git a/robottelo/cli/webhook.py b/robottelo/cli/webhook.py index c7b93f76d99..eb0e41905b5 100644 --- a/robottelo/cli/webhook.py +++ b/robottelo/cli/webhook.py @@ -13,12 +13,12 @@ Options: -h, --help Print help """ + from robottelo.cli.base import Base, CLIError from robottelo.constants import WEBHOOK_EVENTS, WEBHOOK_METHODS class Webhook(Base): - command_base = 'webhook' @classmethod diff --git a/robottelo/config/validators.py b/robottelo/config/validators.py index f598821839d..1a6a6d7f4aa 100644 --- a/robottelo/config/validators.py +++ b/robottelo/config/validators.py @@ -189,6 +189,10 @@ must_exist=True, ), ], + jira=[ + Validator('jira.url', default='https://issues.redhat.com'), + Validator('jira.api_key', must_exist=True), + ], ldap=[ Validator( 'ldap.basedn', @@ -225,7 +229,12 @@ Validator( 'oscap.content_path', must_exist=True, - ) + ), + Validator( + 'oscap.profile', + default='security7', + must_exist=True, + ), ], osp=[ Validator( diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py index bca0dbdf9b2..8f6f56a73a5 100644 --- a/robottelo/constants/__init__.py +++ b/robottelo/constants/__init__.py @@ -1,4 +1,5 @@ """Defines various constants""" + from pathlib import Path from box import Box @@ -306,9 +307,9 @@ class Colored(Box): 'kickstart': { 'rhel6': 'Red Hat Enterprise Linux 6 Server (Kickstart)', 'rhel7': 'Red Hat Enterprise Linux 7 Server (Kickstart)', - 'rhel8': 'Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)', + 'rhel8_bos': 'Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)', 'rhel8_aps': 'Red Hat Enterprise Linux 8 for x86_64 - AppStream (Kickstart)', - 'rhel9': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS (Kickstart)', + 'rhel9_bos': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS (Kickstart)', 'rhel9_aps': 'Red Hat Enterprise Linux 9 for x86_64 - AppStream (Kickstart)', }, 'rhel8_bos': 'Red Hat Enterprise Linux 8 for x86_64 - BaseOS (RPMs)', @@ -411,6 +412,7 @@ class Colored(Box): 'reposet': REPOSET['rhsclient8'], 'product': PRDS['rhel8'], 'distro': 'rhel8', + 'releasever': None, 'key': PRODUCT_KEY_SAT_CLIENT, }, 'rhsclient9': { @@ -510,6 +512,7 @@ class Colored(Box): 'reposet': REPOSET['rhst8'], 'product': PRDS['rhel8'], 'distro': 'rhel8', + 'releasever': None, 'key': 'rhst', }, 'kickstart': { @@ -533,7 +536,7 @@ class Colored(Box): 'id': 'rhel-8-for-x86_64-baseos-kickstart', 'name': 'Red Hat Enterprise Linux 8 for x86_64 - BaseOS Kickstart 8.9', 'version': '8.9', - 'reposet': 
REPOSET['kickstart']['rhel8'], + 'reposet': REPOSET['kickstart']['rhel8_bos'], 'product': PRDS['rhel8'], 'distro': 'rhel8', }, @@ -549,7 +552,7 @@ class Colored(Box): 'id': 'rhel-9-for-x86_64-baseos-kickstart', 'name': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS Kickstart 9.3', 'version': '9.3', - 'reposet': REPOSET['kickstart']['rhel9'], + 'reposet': REPOSET['kickstart']['rhel9_bos'], 'product': PRDS['rhel9'], 'distro': 'rhel9', }, @@ -752,6 +755,9 @@ class Colored(Box): REAL_RHEL7_0_1_PACKAGE_FILENAME = 'python-pulp-common-2.21.0.2-1.el7sat.noarch.rpm' REAL_RHEL7_0_2_PACKAGE_NAME = 'python2-psutil' # for RHBA-2021:1314 REAL_RHEL7_0_2_PACKAGE_FILENAME = 'python2-psutil-5.7.2-2.el7sat.x86_64.rpm' +REAL_RHEL8_1_PACKAGE_NAME = 'puppet-agent' # for RHSA-2022:4867 +REAL_RHEL8_1_PACKAGE_FILENAME = 'puppet-agent-6.19.1-1.el8sat.x86_64' +REAL_RHEL8_2_PACKAGE_FILENAME = 'puppet-agent-6.26.0-1.el8sat.x86_64' FAKE_0_CUSTOM_PACKAGE_GROUP_NAME = 'birds' FAKE_3_YUM_OUTDATED_PACKAGES = [ 'acme-package-1.0.1-1.noarch', @@ -806,6 +812,7 @@ class Colored(Box): FAKE_2_ERRATA_ID = 'RHSA-2012:0055' # for FAKE_1_CUSTOM_PACKAGE REAL_RHEL7_0_ERRATA_ID = 'RHBA-2020:3615' # for REAL_RHEL7_0_0_PACKAGE REAL_RHEL7_1_ERRATA_ID = 'RHBA-2017:0395' # tcsh bug fix update +REAL_RHEL8_1_ERRATA_ID = 'RHSA-2022:4867' # for REAL_RHEL8_1_PACKAGE FAKE_1_YUM_REPOS_COUNT = 32 FAKE_3_YUM_REPOS_COUNT = 78 FAKE_9_YUM_SECURITY_ERRATUM = [ @@ -1559,6 +1566,7 @@ class Colored(Box): 'rhel6_content': 'Red Hat rhel6 default content', 'rhel7_content': 'Red Hat rhel7 default content', 'rhel8_content': 'Red Hat rhel8 default content', + 'rhel9_content': 'Red Hat rhel9 default content', 'rhel_firefox': 'Red Hat firefox default content', } @@ -1566,7 +1574,8 @@ class Colored(Box): 'c2s_rhel6': 'C2S for Red Hat Enterprise Linux 6', 'dsrhel6': 'DISA STIG for Red Hat Enterprise Linux 6', 'dsrhel7': 'DISA STIG for Red Hat Enterprise Linux 7', - 'dsrhel8': '[DRAFT] DISA STIG for Red Hat Enterprise Linux 8', + 'dsrhel8': 'DISA STIG for Red Hat Enterprise Linux 8', + 'dsrhel9': 'DISA STIG for Red Hat Enterprise Linux 9', 'esp': 'Example Server Profile', 'rhccp': 'Red Hat Corporate Profile for Certified Cloud Providers (RH CCP)', 'firefox': 'Mozilla Firefox STIG', @@ -1646,63 +1655,20 @@ class Colored(Box): 'Viewer', ] -BOOKMARK_ENTITIES = [ +BOOKMARK_ENTITIES_SELECTION = [ { 'name': 'ActivationKey', 'controller': 'katello_activation_keys', 'session_name': 'activationkey', + 'old_ui': True, }, - {'name': 'Dashboard', 'controller': 'dashboard', 'session_name': 'dashboard'}, - {'name': 'Audit', 'controller': 'audits', 'session_name': 'audit'}, - { - 'name': 'Report', - 'controller': 'config_reports', - 'setup': entities.Report, - 'session_name': 'configreport', - }, - {'name': 'Task', 'controller': 'foreman_tasks_tasks', 'session_name': 'task'}, - # TODO Load manifest for the test_positive_end_to_end from the ui/test_bookmarks.py - # {'name': 'Subscriptions', 'controller': 'subscriptions','session_name': 'subscription' }, - {'name': 'Product', 'controller': 'katello_products', 'session_name': 'product'}, - {'name': 'Repository', 'controller': 'katello_repositories', 'session_name': 'repository'}, - { - 'name': 'ContentCredential', - 'controller': 'katello_content_credentials', - 'session_name': 'contentcredential', - }, - {'name': 'SyncPlan', 'controller': 'katello_sync_plans', 'session_name': 'syncplan'}, - {'name': 'ContentView', 'controller': 'katello_content_views', 'session_name': 'contentview'}, - {'name': 'Errata', 'controller': 
'katello_errata', 'session_name': 'errata'}, - {'name': 'Package', 'controller': 'katello_erratum_packages', 'session_name': 'package'}, - { - 'name': 'ContainerImageTag', - 'controller': 'katello_docker_tags', - 'session_name': 'containerimagetag', - }, + {'name': 'Errata', 'controller': 'katello_errata', 'session_name': 'errata', 'old_ui': True}, {'name': 'Host', 'controller': 'hosts', 'setup': entities.Host, 'session_name': 'host_new'}, - {'name': 'ContentHost', 'controller': 'hosts', 'session_name': 'contenthost'}, { - 'name': 'HostCollection', - 'controller': 'katello_host_collections', - 'session_name': 'hostcollection', - }, - {'name': 'Architecture', 'controller': 'architectures', 'session_name': 'architecture'}, - { - 'name': 'HardwareModel', - 'controller': 'models', - 'setup': entities.Model, - 'session_name': 'hardwaremodel', - }, - { - 'name': 'InstallationMedia', - 'controller': 'media', - 'session_name': 'media', - 'setup': entities.Media, - }, - { - 'name': 'OperatingSystem', - 'controller': 'operatingsystems', - 'session_name': 'operatingsystem', + 'name': 'UserGroup', + 'controller': 'usergroups', + 'setup': entities.UserGroup, + 'session_name': 'usergroup', }, { 'name': 'PartitionTable', @@ -1710,65 +1676,24 @@ class Colored(Box): 'setup': entities.PartitionTable, 'session_name': 'partitiontable', }, + { + 'name': 'Product', + 'controller': 'katello_products', + 'session_name': 'product', + 'old_ui': True, + }, { 'name': 'ProvisioningTemplate', 'controller': 'provisioning_templates', 'session_name': 'provisioningtemplate', }, - { - 'name': 'HostGroup', - 'controller': 'hostgroups', - 'setup': entities.HostGroup, - 'session_name': 'hostgroup', - }, - { - 'name': 'DiscoveryRule', - 'controller': 'discovery_rules', - 'setup': entities.DiscoveryRule, - 'session_name': 'discoveryrule', - }, - { - 'name': 'GlobalParameter', - 'controller': 'common_parameters', - 'setup': entities.CommonParameter, - 'skip_for_ui': True, - }, - {'name': 'Role', 'controller': 'ansible_roles', 'setup': entities.Role, 'session_name': 'role'}, - {'name': 'Variables', 'controller': 'ansible_variables', 'session_name': 'ansiblevariables'}, - {'name': 'Capsules', 'controller': 'smart_proxies', 'session_name': 'capsule'}, - { - 'name': 'ComputeResource', - 'controller': 'compute_resources', - 'setup': entities.LibvirtComputeResource, - 'session_name': 'computeresource', - }, - { - 'name': 'ComputeProfile', - 'controller': 'compute_profiles', - 'setup': entities.ComputeProfile, - 'session_name': 'computeprofile', - }, - {'name': 'Subnet', 'controller': 'subnets', 'setup': entities.Subnet, 'session_name': 'subnet'}, - {'name': 'Domain', 'controller': 'domains', 'setup': entities.Domain, 'session_name': 'domain'}, - {'name': 'Realm', 'controller': 'realms', 'setup': entities.Realm, 'session_name': 'realm'}, - {'name': 'Location', 'controller': 'locations', 'session_name': 'location'}, - {'name': 'Organization', 'controller': 'organizations', 'session_name': 'organization'}, - {'name': 'User', 'controller': 'users', 'session_name': 'user'}, - { - 'name': 'UserGroup', - 'controller': 'usergroups', - 'setup': entities.UserGroup, - 'session_name': 'usergroup', - }, - {'name': 'Role', 'controller': 'roles', 'session_name': 'role'}, - {'name': 'Settings', 'controller': 'settings', 'session_name': 'settings'}, + {'name': 'Repository', 'controller': 'katello_repositories', 'session_name': 'repository'}, ] STRING_TYPES = ['alpha', 'numeric', 'alphanumeric', 'latin1', 'utf8', 'cjk', 'html'] VMWARE_CONSTANTS = { 
'folder': 'vm', - 'guest_os': 'Red Hat Enterprise Linux 8 (64 bit)', 'scsicontroller': 'LSI Logic Parallel', 'virtualhw_version': 'Default', 'pool': 'Resources', @@ -1809,10 +1734,15 @@ class Colored(Box): ), } - +# Bugzilla statuses used by Robottelo issue handler. OPEN_STATUSES = ("NEW", "ASSIGNED", "POST", "MODIFIED") CLOSED_STATUSES = ("ON_QA", "VERIFIED", "RELEASE_PENDING", "CLOSED") WONTFIX_RESOLUTIONS = ("WONTFIX", "CANTFIX", "DEFERRED") +# Jira statuses used by Robottelo issue handler. +JIRA_OPEN_STATUSES = ("New", "Backlog", "Refinement", "To Do", "In Progress") +JIRA_ONQA_STATUS = "Review" +JIRA_CLOSED_STATUSES = ("Release Pending", "Closed") +JIRA_WONTFIX_RESOLUTIONS = "Obsolete" GROUP_MEMBERSHIP_MAPPER = { "config": { @@ -1944,7 +1874,6 @@ class Colored(Box): "content_export_repository", "content_export_version", "content_rhel_role", - "content_upload_ostree", "content_upload", "content_view_filter_info", "content_view_filter_rule_info", @@ -2005,9 +1934,9 @@ class Colored(Box): "puppet_environment", "realm", "redhat_manifest", + "registration_command", "repositories_role", "repository_info", - "repository_ostree", "repository_set_info", "repository_set", "repository_sync", @@ -2034,6 +1963,7 @@ class Colored(Box): "usergroup", "user", "wait_for_task", + "webhook", ] FAM_ROOT_DIR = '/usr/share/ansible/collections/ansible_collections/redhat/satellite' @@ -2128,6 +2058,8 @@ class Colored(Box): 'option is not present in the /etc/dnf/dnf.conf' ) +EXPIRED_MANIFEST = 'expired-manifest.zip' + # Data File Paths class DataFile(Box): @@ -2148,3 +2080,4 @@ class DataFile(Box): PARTITION_SCRIPT_DATA_FILE = DATA_DIR.joinpath(PARTITION_SCRIPT_DATA_FILE) OS_TEMPLATE_DATA_FILE = DATA_DIR.joinpath(OS_TEMPLATE_DATA_FILE) FAKE_3_YUM_REPO_RPMS_ANT = DATA_DIR.joinpath(FAKE_3_YUM_REPO_RPMS[0]) + EXPIRED_MANIFEST_FILE = DATA_DIR.joinpath(EXPIRED_MANIFEST) diff --git a/robottelo/constants/repos.py b/robottelo/constants/repos.py index 9738894a689..9d968a3830b 100644 --- a/robottelo/constants/repos.py +++ b/robottelo/constants/repos.py @@ -12,7 +12,7 @@ CUSTOM_RPM_SHA = 'https://fixtures.pulpproject.org/rpm-with-sha/' CUSTOM_RPM_SHA_512 = 'https://fixtures.pulpproject.org/rpm-with-sha-512/' FAKE_5_YUM_REPO = 'https://rplevka.fedorapeople.org/fakerepo01/' -FAKE_YUM_DRPM_REPO = 'https://fixtures.pulpproject.org/drpm-signed/' +FAKE_YUM_MISSING_REPO = 'https://fixtures.pulpproject.org/missing-repo/' FAKE_YUM_SRPM_REPO = 'https://fixtures.pulpproject.org/srpm-signed/' FAKE_YUM_SRPM_DUPLICATE_REPO = 'https://fixtures.pulpproject.org/srpm-duplicate/' FAKE_YUM_MD5_REPO = 'https://fixtures.pulpproject.org/rpm-with-md5/' @@ -25,6 +25,6 @@ FAKE_0_YUM_REPO_STRING_BASED_VERSIONS = ( 'https://fixtures.pulpproject.org/rpm-string-version-updateinfo/' ) - +FAKE_ZST_REPO = 'https://fixtures.pulpproject.org/rpm-zstd-metadata' ANSIBLE_GALAXY = 'https://galaxy.ansible.com/' ANSIBLE_HUB = 'https://cloud.redhat.com/api/automation-hub/' diff --git a/robottelo/content_info.py b/robottelo/content_info.py index fd404f5a7d9..803692a0823 100644 --- a/robottelo/content_info.py +++ b/robottelo/content_info.py @@ -1,4 +1,5 @@ """Miscellaneous content helper functions""" + import os import re diff --git a/robottelo/exceptions.py b/robottelo/exceptions.py index a6100564873..057e0891cfc 100644 --- a/robottelo/exceptions.py +++ b/robottelo/exceptions.py @@ -107,9 +107,7 @@ def __str__(self): def __repr__(self): """Include class name status, stderr and msg to improve logging""" - return '{}(status={!r}, stderr={!r}, msg={!r}'.format( - 
type(self).__name__, self.status, self.stderr, self.msg - ) + return f'{type(self).__name__}(status={self.status!r}, stderr={self.stderr!r}, msg={self.msg!r}' class CLIReturnCodeError(CLIBaseError): diff --git a/robottelo/host_helpers/api_factory.py b/robottelo/host_helpers/api_factory.py index da6a39ecd76..d0591770888 100644 --- a/robottelo/host_helpers/api_factory.py +++ b/robottelo/host_helpers/api_factory.py @@ -2,7 +2,9 @@ It is not meant to be used directly, but as part of a robottelo.hosts.Satellite instance example: my_satellite.api_factory.api_method() """ + from contextlib import contextmanager +from datetime import datetime import time from fauxfactory import gen_ipaddr, gen_mac, gen_string @@ -144,7 +146,7 @@ def enable_sync_redhat_repo(self, rh_repo, org_id, timeout=1500): """Enable the RedHat repo, sync it and returns repo_id""" # Enable RH repo and fetch repository_id repo_id = self.enable_rhrepo_and_fetchid( - basearch=rh_repo['basearch'], + basearch=rh_repo.get('basearch', rh_repo.get('arch', DEFAULT_ARCHITECTURE)), org_id=org_id, product=rh_repo['product'], repo=rh_repo['name'], @@ -647,12 +649,18 @@ def attach_custom_product_subscription(self, prod_name=None, host_name=None): ) def wait_for_errata_applicability_task( - self, host_id, from_when, search_rate=1, max_tries=10, poll_rate=None, poll_timeout=15 + self, + host_id, + from_when, + search_rate=1, + max_tries=10, + poll_rate=None, + poll_timeout=15, ): """Search the generate applicability task for given host and make sure it finishes :param int host_id: Content host ID of the host where we are regenerating applicability. - :param int from_when: Timestamp (in UTC) to limit number of returned tasks to investigate. + :param int from_when: Epoch Time (seconds in UTC) to limit number of returned tasks to investigate. :param int search_rate: Delay between searches. :param int max_tries: How many times search should be executed. :param int poll_rate: Delay between the end of one task check-up and @@ -666,23 +674,30 @@ def wait_for_errata_applicability_task( assert isinstance(host_id, int), 'Param host_id have to be int' assert isinstance(from_when, int), 'Param from_when have to be int' now = int(time.time()) - assert from_when <= now, 'Param from_when have to be timestamp in the past' + assert from_when <= now, 'Param from_when have to be epoch time in the past' for _ in range(max_tries): now = int(time.time()) - max_age = now - from_when + 1 + # Format epoch time for search, one second prior margin of safety + timestamp = datetime.fromtimestamp(from_when - 1).strftime('%m-%d-%Y %H:%M:%S') + # Long format to match search: ex. 
'January 03, 2024 at 03:08:08 PM' + long_format = datetime.strptime(timestamp, '%m-%d-%Y %H:%M:%S').strftime( + '%B %d, %Y at %I:%M:%S %p' + ) search_query = ( - '( label = Actions::Katello::Host::GenerateApplicability OR label = ' - 'Actions::Katello::Host::UploadPackageProfile ) AND started_at > "%s seconds ago"' - % max_age + '( label = Actions::Katello::Applicability::Hosts::BulkGenerate OR' + ' label = Actions::Katello::Host::UploadPackageProfile ) AND' + f' started_at >= "{long_format}" ' ) tasks = self._satellite.api.ForemanTask().search(query={'search': search_query}) tasks_finished = 0 for task in tasks: if ( - task.label == 'Actions::Katello::Host::GenerateApplicability' + task.label == 'Actions::Katello::Applicability::Hosts::BulkGenerate' + and 'host_ids' in task.input and host_id in task.input['host_ids'] ) or ( task.label == 'Actions::Katello::Host::UploadPackageProfile' + and 'host' in task.input and host_id == task.input['host']['id'] ): task.poll(poll_rate=poll_rate, timeout=poll_timeout) @@ -692,7 +707,7 @@ def wait_for_errata_applicability_task( time.sleep(search_rate) else: raise AssertionError( - f"No task was found using query '{search_query}' for host '{host_id}'" + f'No task was found using query " {search_query} " for host id: {host_id}' ) def wait_for_syncplan_tasks(self, repo_backend_id=None, timeout=10, repo_name=None): diff --git a/robottelo/host_helpers/cli_factory.py b/robottelo/host_helpers/cli_factory.py index 7d3f9f1afcb..5424982e06a 100644 --- a/robottelo/host_helpers/cli_factory.py +++ b/robottelo/host_helpers/cli_factory.py @@ -3,6 +3,7 @@ It is not meant to be used directly, but as part of a robottelo.hosts.Satellite instance example: my_satellite.cli_factory.make_org() """ + import datetime from functools import lru_cache, partial import inspect @@ -56,9 +57,7 @@ def create_object(cli_object, options, values=None, credentials=None): except CLIReturnCodeError as err: # If the object is not created, raise exception, stop the show. raise CLIFactoryError( - 'Failed to create {} with data:\n{}\n{}'.format( - cli_object.__name__, pprint.pformat(options, indent=2), err.msg - ) + f'Failed to create {cli_object.__name__} with data:\n{pprint.pformat(options, indent=2)}\n{err.msg}' ) from err # Sometimes we get a list with a dictionary and not a dictionary. 
if isinstance(result, list) and len(result) > 0: @@ -1003,9 +1002,7 @@ def add_role_permissions(self, role_id, resource_permissions): missing_permissions = set(permission_names).difference(set(available_permission_names)) if missing_permissions: raise CLIFactoryError( - 'Permissions "{}" are not available in Resource "{}"'.format( - list(missing_permissions), resource_type - ) + f'Permissions "{list(missing_permissions)}" are not available in Resource "{resource_type}"' ) # Create the current resource type role permissions options = {'role-id': role_id} diff --git a/robottelo/host_helpers/contenthost_mixins.py b/robottelo/host_helpers/contenthost_mixins.py index 96da029d148..79d3001a7e5 100644 --- a/robottelo/host_helpers/contenthost_mixins.py +++ b/robottelo/host_helpers/contenthost_mixins.py @@ -1,4 +1,5 @@ """A collection of mixins for robottelo.hosts classes""" + from functools import cached_property import json from tempfile import NamedTemporaryFile @@ -136,6 +137,12 @@ def applicable_errata_count(self): """return the applicable errata count for a host""" return self.nailgun_host.read().content_facet_attributes['errata_counts']['total'] + @property + def applicable_package_count(self): + """return the applicable package count for a host""" + self.run('subscription-manager repos') + return self.nailgun_host.read().content_facet_attributes['applicable_package_count'] + class SystemFacts: """Helpers mixin that enables getting/setting subscription-manager facts on a host""" diff --git a/robottelo/host_helpers/repository_mixins.py b/robottelo/host_helpers/repository_mixins.py index 09a5fb1529d..d8dbc9b9798 100644 --- a/robottelo/host_helpers/repository_mixins.py +++ b/robottelo/host_helpers/repository_mixins.py @@ -2,6 +2,7 @@ All the Repository classes in this module are supposed to use from sat_object.cli_factory object. The direct import of the repo classes in this module is prohibited !!!!! """ + import inspect import sys @@ -509,7 +510,6 @@ class RepositoryCollection: satellite = None def __init__(self, distro=None, repositories=None): - self._items = [] if distro is not None and distro not in constants.DISTROS_SUPPORTED: diff --git a/robottelo/host_helpers/satellite_mixins.py b/robottelo/host_helpers/satellite_mixins.py index 3c26862c3bf..58fe5b5d237 100644 --- a/robottelo/host_helpers/satellite_mixins.py +++ b/robottelo/host_helpers/satellite_mixins.py @@ -288,10 +288,10 @@ def default_url_on_new_port(self, oldport, newport): post_ncat_procs = self.execute('pgrep ncat').stdout.splitlines() ncat_pid = set(post_ncat_procs).difference(set(pre_ncat_procs)) if not len(ncat_pid): - stderr = channel.get_exit_status()[1] - logger.debug(f'Tunnel failed: {stderr}') + err = channel.get_exit_signal() + logger.debug(f'Tunnel failed: {err}') # Something failed, so raise an exception. 
- raise CapsuleTunnelError(f'Starting ncat failed: {stderr}') + raise CapsuleTunnelError(f'Starting ncat failed: {err}') forward_url = f'https://{self.hostname}:{newport}' logger.debug(f'Yielding capsule forward port url: {forward_url}') try: @@ -308,7 +308,7 @@ def validate_pulp_filepath( ): """Checks the existence of certain files in a pulp dir""" extension_query = ' -o '.join([f'-name "{file}"' for file in file_names]) - result = self.execute(fr'find {dir_path}{org.name} -type f \( {extension_query} \)') + result = self.execute(rf'find {dir_path}{org.name} -type f \( {extension_query} \)') return result.stdout diff --git a/robottelo/host_helpers/ui_factory.py b/robottelo/host_helpers/ui_factory.py index df156ad6d6f..84c1498bcd5 100644 --- a/robottelo/host_helpers/ui_factory.py +++ b/robottelo/host_helpers/ui_factory.py @@ -3,6 +3,7 @@ Need to pass the existing session object to the ui_factory method as a parameter example: my_satellite.ui_factory(session).ui_method() """ + from fauxfactory import gen_string from robottelo.constants import DEFAULT_CV, ENVIRONMENT diff --git a/robottelo/hosts.py b/robottelo/hosts.py index 12cc3dfd0e9..93cc4f62f08 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -61,7 +61,7 @@ POWER_OPERATIONS = { VmState.RUNNING: 'running', VmState.STOPPED: 'stopped', - 'reboot': 'reboot' + 'reboot': 'reboot', # TODO paused, suspended, shelved? } @@ -169,6 +169,10 @@ class IPAHostError(Exception): pass +class ProxyHostError(Exception): + pass + + class ContentHost(Host, ContentHostMixins): run = Host.execute default_timeout = settings.server.ssh_client.command_timeout @@ -432,7 +436,10 @@ def power_control(self, state=VmState.RUNNING, ensure=True): if ensure and state in [VmState.RUNNING, 'reboot']: try: wait_for( - self.connect, fail_condition=lambda res: res is not None, handle_exception=True + self.connect, + fail_condition=lambda res: res is not None, + timeout=300, + handle_exception=True, ) # really broad diaper here, but connection exceptions could be a ton of types except TimedOutError as toe: @@ -842,10 +849,7 @@ def register_contenthost( registration. """ - if username and password: - userpass = f' --username {username} --password {password}' - else: - userpass = '' + userpass = f' --username {username} --password {password}' if username and password else '' # Setup the base command cmd = 'subscription-manager register' if org: @@ -888,12 +892,17 @@ def get(self, remote_path, local_path=None): """Get a remote file from the broker virtual machine.""" self.session.sftp_read(source=remote_path, destination=local_path) - def put(self, local_path, remote_path=None): + def put(self, local_path, remote_path=None, temp_file=False): """Put a local file to the broker virtual machine. If local_path is a manifest object, write its contents to a temporary file then continue with the upload. 
""" - if 'utils.manifest' in str(local_path): + if temp_file: + with NamedTemporaryFile(dir=robottelo_tmp_dir) as content_file: + content_file.write(str.encode(local_path)) + content_file.flush() + self.session.sftp_write(source=content_file.name, destination=remote_path) + elif 'utils.manifest' in str(local_path): with NamedTemporaryFile(dir=robottelo_tmp_dir) as content_file: content_file.write(local_path.content.read()) content_file.flush() @@ -937,11 +946,7 @@ def add_authorized_key(self, pub_key): # ensure ssh directory exists self.execute(f'mkdir -p {ssh_path}') # append the key if doesn't exists - self.execute( - "grep -q '{key}' {dest} || echo '{key}' >> {dest}".format( - key=key_content, dest=auth_file - ) - ) + self.execute(f"grep -q '{key_content}' {auth_file} || echo '{key_content}' >> {auth_file}") # set proper permissions self.execute(f'chmod 700 {ssh_path}') self.execute(f'chmod 600 {auth_file}') @@ -1607,7 +1612,8 @@ def install(self, installer_obj=None, cmd_args=None, cmd_kwargs=None): """General purpose installer""" if not installer_obj: command_opts = {'scenario': self.__class__.__name__.lower()} - command_opts.update(cmd_kwargs) + if cmd_kwargs: + command_opts.update(cmd_kwargs) installer_obj = InstallerCommand(*cmd_args, **command_opts) return self.execute(installer_obj.get_command(), timeout=0) @@ -1706,22 +1712,6 @@ def set_rex_script_mode_provider(self, mode='ssh'): if result.status != 0: raise SatelliteHostError(f'Failed to enable pull provider: {result.stdout}') - def run_installer_arg(self, *args, timeout='20m'): - """Run an installer argument on capsule""" - installer_args = list(args) - installer_command = InstallerCommand( - installer_args=installer_args, - ) - result = self.execute( - installer_command.get_command(), - timeout=timeout, - ) - if result.status != 0: - raise SatelliteHostError( - f'Failed to execute with arguments: {installer_args} and,' - f' the stderr is {result.stderr}' - ) - def set_mqtt_resend_interval(self, value): """Set the time interval in seconds at which the notification should be re-sent to the mqtt host until the job is picked up or cancelled""" @@ -2558,3 +2548,42 @@ def remove_user_from_usergroup(self, member_username, member_group): ) if result.status != 0: raise IPAHostError('Failed to remove the user from user group') + + +class ProxyHost(Host): + """Class representing HTTP Proxy host""" + + def __init__(self, url, **kwargs): + self._conf_dir = '/etc/squid/' + self._access_log = '/var/log/squid/access.log' + kwargs['hostname'] = urlparse(url).hostname + super().__init__(**kwargs) + + def add_user(self, name, passwd): + """Adds new user to the HTTP Proxy""" + res = self.execute(f"htpasswd -b {self._conf_dir}passwd {name} '{passwd}'") + assert res.status == 0, f'User addition failed on the proxy side: {res.stderr}' + return res + + def remove_user(self, name): + """Removes a user from HTTP Proxy""" + res = self.execute(f'htpasswd -D {self._conf_dir}passwd {name}') + assert res.status == 0, f'User deletion failed on the proxy side: {res.stderr}' + return res + + def get_log(self, which=None, tail=None, grep=None): + """Returns log content from the HTTP Proxy instance + + :param which: Which log file should be read. Defaults to access.log. + :param tail: Use when only the tail of a long log file is needed. + :param grep: Grep for some expression. 
+ :return: Log content found or None + """ + log_file = which or self._access_log + cmd = f'tail -n {tail} {log_file}' if tail else f'cat {log_file}' + if grep: + cmd = f'{cmd} | grep "{grep}"' + res = self.execute(cmd) + if res.status != 0: + raise ProxyHostError(f'Proxy log read failed: {res.stderr}') + return None if res.stdout == '' else res.stdout diff --git a/robottelo/ssh.py b/robottelo/ssh.py index 8b72bed3497..1be3a65a811 100644 --- a/robottelo/ssh.py +++ b/robottelo/ssh.py @@ -1,4 +1,5 @@ """Utility module to handle the shared ssh connection.""" + from robottelo.cli import hammer diff --git a/robottelo/utils/datafactory.py b/robottelo/utils/datafactory.py index c5ccc38ba14..6f571986a56 100644 --- a/robottelo/utils/datafactory.py +++ b/robottelo/utils/datafactory.py @@ -1,4 +1,5 @@ """Data Factory for all entities""" + from functools import wraps import random import string @@ -389,7 +390,7 @@ def valid_names_list(): f"νέος χρήστης-{gen_string('utf8', 2)}", f"foo@!#$^&*( ) {gen_string('utf8')}", f"{gen_string('utf8')}", - f"bar+{{}}|\"?hi {gen_string('utf8')}", + f"bar+{{}}|?hi {gen_string('utf8')}", f" {gen_string('utf8')}", f"{gen_string('utf8')} ", ] diff --git a/robottelo/utils/decorators/__init__.py b/robottelo/utils/decorators/__init__.py index 371afd50f16..3dd3c9edeb3 100644 --- a/robottelo/utils/decorators/__init__.py +++ b/robottelo/utils/decorators/__init__.py @@ -1,4 +1,5 @@ """Implements various decorators""" + from functools import wraps OBJECT_CACHE = {} diff --git a/robottelo/utils/decorators/func_locker.py b/robottelo/utils/decorators/func_locker.py index 08f4073c614..83632428313 100644 --- a/robottelo/utils/decorators/func_locker.py +++ b/robottelo/utils/decorators/func_locker.py @@ -39,6 +39,7 @@ def test_that_conflict_with_test_to_lock(self) with locking_function(self.test_to_lock): # do some operations that conflict with test_to_lock """ + from contextlib import contextmanager import functools import inspect @@ -223,7 +224,6 @@ def lock_function( class_name = '.'.join(class_names) def main_wrapper(func): - func.__class_name__ = class_name func.__function_locked__ = True @@ -240,9 +240,7 @@ def function_wrapper(*args, **kwargs): with file_lock(lock_file_path, remove=False, timeout=timeout) as handler: logger.info( - 'process id: {} lock function using file path: {}'.format( - process_id, lock_file_path - ) + f'process id: {process_id} lock function using file path: {lock_file_path}' ) # write the process id that locked this function _write_content(handler, process_id) @@ -303,9 +301,7 @@ def locking_function( with file_lock(lock_file_path, remove=False, timeout=timeout) as handler: logger.info( - 'process id: {} - lock function name:{} - using file path: {}'.format( - process_id, function_name, lock_file_path - ) + f'process id: {process_id} - lock function name:{function_name} - using file path: {lock_file_path}' ) # write the process id that locked this function _write_content(handler, process_id) diff --git a/robottelo/utils/decorators/func_shared/file_storage.py b/robottelo/utils/decorators/func_shared/file_storage.py index 70a6d6af889..e7935300c86 100644 --- a/robottelo/utils/decorators/func_shared/file_storage.py +++ b/robottelo/utils/decorators/func_shared/file_storage.py @@ -45,7 +45,6 @@ class FileStorageHandler(BaseStorageHandler): """Key value file storage handler.""" def __init__(self, root_dir=None, create=True, lock_timeout=LOCK_TIMEOUT): - if root_dir is None: root_dir = _get_root_dir() diff --git 
a/robottelo/utils/decorators/func_shared/redis_storage.py b/robottelo/utils/decorators/func_shared/redis_storage.py index 571ac0ac699..74a5b8d0ecc 100644 --- a/robottelo/utils/decorators/func_shared/redis_storage.py +++ b/robottelo/utils/decorators/func_shared/redis_storage.py @@ -23,7 +23,6 @@ def __init__( password=REDIS_PASSWORD, lock_timeout=LOCK_TIMEOUT, ): - self._lock_timeout = lock_timeout self._client = redis.StrictRedis(host=host, port=port, db=db, password=password) diff --git a/robottelo/utils/decorators/func_shared/shared.py b/robottelo/utils/decorators/func_shared/shared.py index 73961df50b5..50e2d371fbe 100644 --- a/robottelo/utils/decorators/func_shared/shared.py +++ b/robottelo/utils/decorators/func_shared/shared.py @@ -84,6 +84,7 @@ def shared_class_method(cls, org=None, repo=None): return dict(org=cls.org, repo=cls.repo} """ + import datetime import functools import hashlib @@ -213,7 +214,6 @@ def __init__( inject=False, injected_kw='_inject', ): - if storage_handler is None: storage_handler = _get_default_storage_handler() @@ -258,7 +258,6 @@ def _encode_result_kwargs(self, kwargs): return kwargs def _call_function(self): - retries = self._max_retries if not retries: retries = 1 @@ -271,9 +270,7 @@ def _call_function(self): traceback_text = None try: logger.info( - 'calling shared function: {} - retry index: {}'.format( - self._function_key, retry_index - ) + f'calling shared function: {self._function_key} - retry index: {retry_index}' ) result = self._function(*self._function_args, **self._function_kwargs) break @@ -334,9 +331,7 @@ def __call__(self): creation_datetime = datetime.datetime.utcnow().strftime(_DATETIME_FORMAT) if exp: error = str(exp) or 'error occurred' - error_class_name = '{}.{}'.format( - exp.__class__.__module__, exp.__class__.__name__ - ) + error_class_name = f'{exp.__class__.__module__}.{exp.__class__.__name__}' value = dict( state=_STATE_FAILED, id=self.transaction, @@ -469,11 +464,7 @@ def _get_function_name_key(function_name, scope=None, scope_kwargs=None, scope_c scope_name = _get_scope_name( scope=scope, scope_kwargs=scope_kwargs, scope_context=scope_context ) - if scope_name: - function_name_key = '.'.join([scope_name, function_name]) - else: - function_name_key = function_name - return function_name_key + return '.'.join([scope_name, function_name]) if scope_name else function_name def shared( diff --git a/robottelo/utils/issue_handlers/README.md b/robottelo/utils/issue_handlers/README.md index 1ef130583d0..8661a4965e5 100644 --- a/robottelo/utils/issue_handlers/README.md +++ b/robottelo/utils/issue_handlers/README.md @@ -13,7 +13,7 @@ Issue handler should expose 3 functions. ### `is_open_(issue, data=None)` -e.g: `is_open_bz, is_open_gh, is_open_jr` for Bugzilla, Github and Jira. +e.g: `is_open_bz, is_open_gh, is_open_jira` for Bugzilla, Github and Jira. This function is dispatched from `robottelo.helpers.is_open` that is also used to check for status in the `pytest.mark.skip_if_open` marker. 
@@ -78,10 +78,10 @@ Example of `collected_data`: ## Issue handlers implemented - `.bugzilla.py`: BZ:123456 +- `.jira.py`: SAT-22761 ## Issue handlers to be implemented - `.github.py`: GH:satelliteqe/robottelo#123 - `.gitlab.py`: GL:path/to/repo#123 -- `.jira.py`: JR:SATQE-4561 - `.redmine.py`: RM:pulp.plan.io#5580 diff --git a/robottelo/utils/issue_handlers/__init__.py b/robottelo/utils/issue_handlers/__init__.py index d59c97aec63..803b5800080 100644 --- a/robottelo/utils/issue_handlers/__init__.py +++ b/robottelo/utils/issue_handlers/__init__.py @@ -1,8 +1,10 @@ +import re + # Methods related to issue handlers in general -from robottelo.utils.issue_handlers import bugzilla +from robottelo.utils.issue_handlers import bugzilla, jira -handler_methods = {'BZ': bugzilla.is_open_bz} -SUPPORTED_HANDLERS = tuple(f"{handler}:" for handler in handler_methods) +handler_methods = {'BZ': bugzilla.is_open_bz, 'SAT': jira.is_open_jira} +SUPPORTED_HANDLERS = tuple(f"{handler}" for handler in handler_methods) def add_workaround(data, matches, usage, validation=(lambda *a, **k: True), **kwargs): @@ -16,10 +18,11 @@ def add_workaround(data, matches, usage, validation=(lambda *a, **k: True), **kw def should_deselect(issue, data=None): """Check if test should be deselected based on marked issue.""" # Handlers can be extended to support different issue trackers. - handlers = {'BZ': bugzilla.should_deselect_bz} - supported_handlers = tuple(f"{handler}:" for handler in handlers) + handlers = {'BZ': bugzilla.should_deselect_bz, 'SAT': jira.should_deselect_jira} + supported_handlers = tuple(f"{handler}" for handler in handlers) if str(issue).startswith(supported_handlers): - handler_code = str(issue).partition(":")[0] + res = re.split(':|-', issue) + handler_code = res[0] return handlers[handler_code.strip()](issue.strip(), data) return None @@ -29,7 +32,7 @@ def is_open(issue, data=None): Issue must be prefixed by its handler e.g: - Bugzilla: BZ:123456 + Bugzilla: BZ:123456, Jira: SAT-12345 Arguments: issue {str} -- A string containing handler + number e.g: BZ:123465 @@ -37,11 +40,12 @@ def is_open(issue, data=None): """ # Handlers can be extended to support different issue trackers. if str(issue).startswith(SUPPORTED_HANDLERS): - handler_code = str(issue).partition(":")[0] + res = re.split(':|-', issue) + handler_code = res[0] else: # EAFP raise AttributeError( "is_open argument must be a string starting with a handler code " - "e.g: 'BZ:123456'" + "e.g: 'BZ:123456' for Bugzilla and 'SAT-12345' for Jira." 
f"supported handlers are: {SUPPORTED_HANDLERS}" ) return handler_methods[handler_code.strip()](issue.strip(), data) diff --git a/robottelo/utils/issue_handlers/bugzilla.py b/robottelo/utils/issue_handlers/bugzilla.py index 20836a3660d..dd1c35da2ab 100644 --- a/robottelo/utils/issue_handlers/bugzilla.py +++ b/robottelo/utils/issue_handlers/bugzilla.py @@ -137,7 +137,7 @@ def collect_data_bz(collected_data, cached_data): # pragma: no cover def collect_dupes(bz, collected_data, cached_data=None): # pragma: no cover - """Recursivelly find for duplicates""" + """Recursively find for duplicates""" cached_data = cached_data or {} if bz.get('resolution') == 'DUPLICATE': # Collect duplicates @@ -180,15 +180,15 @@ def collect_clones(bz, collected_data, cached_data=None): # pragma: no cover @retry( - stop=stop_after_attempt(4), # Retry 3 times before raising - wait=wait_fixed(20), # Wait seconds between retries + stop=stop_after_attempt(4), + wait=wait_fixed(20), ) def get_data_bz(bz_numbers, cached_data=None): # pragma: no cover """Get a list of marked BZ data and query Bugzilla REST API. Arguments: bz_numbers {list of str} -- ['123456', ...] - cached_data + cached_data {dict} -- Cached data previous loaded from API Returns: [list of dicts] -- [{'id':..., 'status':..., 'resolution': ...}] diff --git a/robottelo/utils/issue_handlers/jira.py b/robottelo/utils/issue_handlers/jira.py new file mode 100644 index 00000000000..dfeb1c966c4 --- /dev/null +++ b/robottelo/utils/issue_handlers/jira.py @@ -0,0 +1,274 @@ +from collections import defaultdict +import re + +from packaging.version import Version +import pytest +import requests +from tenacity import retry, stop_after_attempt, wait_fixed + +from robottelo.config import settings +from robottelo.constants import ( + JIRA_CLOSED_STATUSES, + JIRA_ONQA_STATUS, + JIRA_OPEN_STATUSES, + JIRA_WONTFIX_RESOLUTIONS, +) +from robottelo.hosts import get_sat_version +from robottelo.logging import logger + +# match any version as in `sat-6.14.x` or `sat-6.13.0` or `6.13.9` +# The .version group being a `d.d` string that can be casted to Version() +VERSION_RE = re.compile(r'(?:sat-)*?(?P\d\.\d)\.\w*') + + +def is_open_jira(issue, data=None): + """Check if specific Jira is open consulting a cached `data` dict or + calling Jira REST API. + + Arguments: + issue {str} -- The Jira reference e.g: SAT-20548 + data {dict} -- Issue data indexed by : or None + """ + jira = try_from_cache(issue, data) + if jira.get("is_open") is not None: # issue has been already processed + return jira["is_open"] + + jira = follow_duplicates(jira) + status = jira.get('status', '') + resolution = jira.get('resolution', '') + + # Jira is explicitly in OPEN status + if status in JIRA_OPEN_STATUSES: + return True + + # Jira is Closed/Obsolete so considered not fixed yet, Jira is open + if status in JIRA_CLOSED_STATUSES and resolution in JIRA_WONTFIX_RESOLUTIONS: + return True + + # Jira is Closed with a resolution in (Done, Done-Errata, ...) + # server.version is higher or equal than Jira fixVersion + # Consider fixed, Jira is not open + fix_version = jira.get('fixVersions') + if fix_version: + return get_sat_version() < Version(min(fix_version)) + return status not in JIRA_CLOSED_STATUSES and status != JIRA_ONQA_STATUS + + +def are_all_jira_open(issues, data=None): + """Check if all Jira is open consulting a cached `data` dict or + calling Jira REST API. 
+ + Arguments: + issues {list} -- The Jira reference e.g: ['SAT-20548', 'SAT-20548'] + data {dict} -- Issue data indexed by : or None + """ + return all(is_open_jira(issue, data) for issue in issues) + + +def are_any_jira_open(issues, data=None): + """Check if any of the Jira is open consulting a cached `data` dict or + calling Jira REST API. + + Arguments: + issues {list} -- The Jira reference e.g: ['SAT-20548', 'SAT-20548'] + data {dict} -- Issue data indexed by : or None + """ + return any(is_open_jira(issue, data) for issue in issues) + + +def should_deselect_jira(issue, data=None): + """Check if test should be deselected based on marked issue. + + 1. Resolution "Obsolete" should deselect + + Arguments: + issue {str} -- The Jira reference e.g: SAT-12345 + data {dict} -- Issue data indexed by : or None + """ + + jira = try_from_cache(issue, data) + if jira.get("is_deselected") is not None: # issue has been already processed + return jira["is_deselected"] + + jira = follow_duplicates(jira) + + return ( + jira.get('status') in JIRA_CLOSED_STATUSES + and jira.get('resolution') in JIRA_WONTFIX_RESOLUTIONS + ) + + +def follow_duplicates(jira): + """recursively load the duplicate data""" + if jira.get('dupe_data'): + jira = follow_duplicates(jira['dupe_data']) + return jira + + +def try_from_cache(issue, data=None): + """Try to fetch issue from given data cache or previous loaded on pytest. + + Arguments: + issue {str} -- The Jira reference e.g: SAT-12345 + data {dict} -- Issue data indexed by : or None + """ + try: + # issue must be passed in `data` argument or already fetched in pytest + if not data and not len(pytest.issue_data[issue]['data']): + raise ValueError + return data or pytest.issue_data[issue]['data'] + except (KeyError, AttributeError, ValueError): # pragma: no cover + # If not then call Jira API again + return get_single_jira(str(issue)) + + +def collect_data_jira(collected_data, cached_data): # pragma: no cover + """Collect data from Jira API and aggregate in a dictionary. 
+ + Arguments: + collected_data {dict} -- dict with Jira issues collected by pytest + cached_data {dict} -- Cached data previous loaded from API + """ + jira_data = ( + get_data_jira( + [item for item in collected_data if item.startswith('SAT-')], + cached_data=cached_data, + ) + or [] + ) + for data in jira_data: + # If Jira is CLOSED/DUPLICATE collect the duplicate + collect_dupes(data, collected_data, cached_data=cached_data) + + jira_key = f"{data['key']}" + data["is_open"] = is_open_jira(jira_key, data) + collected_data[jira_key]['data'] = data + + +def collect_dupes(jira, collected_data, cached_data=None): # pragma: no cover + """Recursively find for duplicates""" + cached_data = cached_data or {} + if jira.get('resolution') == 'Duplicate': + # Collect duplicates + jira['dupe_data'] = get_single_jira(jira.get('dupe_of'), cached_data=cached_data) + dupe_key = f"{jira['dupe_of']}" + # Store Duplicate also in the main collection for caching + if dupe_key not in collected_data: + collected_data[dupe_key]['data'] = jira['dupe_data'] + collected_data[dupe_key]['is_dupe'] = True + collect_dupes(jira['dupe_data'], collected_data, cached_data) + + +# --- API Calls --- + +# cannot use lru_cache in functions that has unhashable args +CACHED_RESPONSES = defaultdict(dict) + + +@retry( + stop=stop_after_attempt(4), # Retry 3 times before raising + wait=wait_fixed(20), # Wait seconds between retries +) +def get_data_jira(jira_numbers, cached_data=None): # pragma: no cover + """Get a list of marked Jira data and query Jira REST API. + + Arguments: + jira_numbers {list of str} -- ['SAT-12345', ...] + cached_data {dict} -- Cached data previous loaded from API + + Returns: + [list of dicts] -- [{'id':..., 'status':..., 'resolution': ...}] + """ + if not jira_numbers: + return [] + + cached_by_call = CACHED_RESPONSES['get_data'].get(str(sorted(jira_numbers))) + if cached_by_call: + return cached_by_call + + if cached_data: + logger.debug(f"Using cached data for {set(jira_numbers)}") + if not all([f'{number}' in cached_data for number in jira_numbers]): + logger.debug("There are Jira's out of cache.") + return [item['data'] for _, item in cached_data.items() if 'data' in item] + + # Ensure API key is set + if not settings.jira.api_key: + logger.warning( + "Config file is missing jira api_key " + "so all tests with skip_if_open mark is skipped. " + "Provide api_key or a jira_cache.json." + ) + # Provide default data for collected Jira's. + return [get_default_jira(number) for number in jira_numbers] + + # No cached data so Call Jira API + logger.debug(f"Calling Jira API for {set(jira_numbers)}") + jira_fields = [ + "key", + "summary", + "status", + "resolution", + "fixVersions", + ] + # Following fields are dynamically calculated/loaded + for field in ('is_open', 'version'): + assert field not in jira_fields + + # Generate jql + jql = ' OR '.join([f"id = {id}" for id in jira_numbers]) + + response = requests.get( + f"{settings.jira.url}/rest/api/latest/search/", + params={ + "jql": jql, + "fields": ",".join(jira_fields), + }, + headers={"Authorization": f"Bearer {settings.jira.api_key}"}, + ) + response.raise_for_status() + data = response.json().get('issues') + # Clean the data, only keep the required info. 
+ data = [ + { + 'key': issue['key'], + 'summary': issue['fields']['summary'], + 'status': issue['fields']['status']['name'], + 'resolution': issue['fields']['resolution']['name'] + if issue['fields']['resolution'] + else '', + 'fixVersions': [ver['name'] for ver in issue['fields']['fixVersions']] + if issue['fields']['fixVersions'] + else [], + } + for issue in data + if issue is not None + ] + CACHED_RESPONSES['get_data'][str(sorted(jira_numbers))] = data + return data + + +def get_single_jira(number, cached_data=None): # pragma: no cover + """Call Jira API to get a single Jira data and cache it""" + cached_data = cached_data or {} + jira_data = CACHED_RESPONSES['get_single'].get(number) + if not jira_data: + try: + jira_data = cached_data[f"{number}"]['data'] + except (KeyError, TypeError): + jira_data = get_data_jira([str(number)], cached_data) + jira_data = jira_data and jira_data[0] + CACHED_RESPONSES['get_single'][number] = jira_data + return jira_data or get_default_jira(number) + + +def get_default_jira(number): # pragma: no cover + """This is the default Jira data when it is not possible to reach Jira api""" + return { + "key": number, + "is_open": True, + "is_deselected": False, + "status": "", + "resolution": "", + "error": "missing jira api_key", + } diff --git a/robottelo/utils/manifest.py b/robottelo/utils/manifest.py index 52c220f3c59..aa377ae73e3 100644 --- a/robottelo/utils/manifest.py +++ b/robottelo/utils/manifest.py @@ -69,9 +69,9 @@ def manifest_clone(self, org_environment_access=False, name='default'): consumer_data['uuid'] = str(uuid.uuid1()) if org_environment_access: consumer_data['contentAccessMode'] = 'org_environment' - consumer_data['owner'][ - 'contentAccessModeList' - ] = 'entitlement,org_environment' + consumer_data['owner']['contentAccessModeList'] = ( + 'entitlement,org_environment' + ) new_consumer_export_zip.writestr(name, json.dumps(consumer_data)) else: new_consumer_export_zip.writestr(name, consumer_export_zip.read(name)) diff --git a/robottelo/utils/ohsnap.py b/robottelo/utils/ohsnap.py index 96241a759b6..83669cdb61e 100644 --- a/robottelo/utils/ohsnap.py +++ b/robottelo/utils/ohsnap.py @@ -1,4 +1,5 @@ """Utility module to communicate with Ohsnap API""" + from box import Box from packaging.version import Version import requests diff --git a/robottelo/utils/shared_resource.py b/robottelo/utils/shared_resource.py index 0ad0bd92e46..fa17f3f0c14 100644 --- a/robottelo/utils/shared_resource.py +++ b/robottelo/utils/shared_resource.py @@ -21,6 +21,7 @@ ... yield target_sat # give the upgraded satellite to the test ... 
# Do post-upgrade cleanup steps if any """ + import json from pathlib import Path import time diff --git a/robottelo/utils/ssh.py b/robottelo/utils/ssh.py index 8b72bed3497..1be3a65a811 100644 --- a/robottelo/utils/ssh.py +++ b/robottelo/utils/ssh.py @@ -1,4 +1,5 @@ """Utility module to handle the shared ssh connection.""" + from robottelo.cli import hammer diff --git a/robottelo/utils/vault.py b/robottelo/utils/vault.py index 97f95755bbd..417fa2c13bd 100644 --- a/robottelo/utils/vault.py +++ b/robottelo/utils/vault.py @@ -1,4 +1,5 @@ """Hashicorp Vault Utils where vault CLI is wrapped to perform vault operations""" + import json import os import re diff --git a/robottelo/utils/virtwho.py b/robottelo/utils/virtwho.py index e159a487394..0013071fba9 100644 --- a/robottelo/utils/virtwho.py +++ b/robottelo/utils/virtwho.py @@ -1,4 +1,5 @@ """Utility module to handle the virtwho configure UI/CLI/API testing""" + import json import re import uuid @@ -99,18 +100,14 @@ def register_system(system, activation_key=None, org='Default_Organization', env runcmd('subscription-manager clean', system) runcmd('rpm -qa | grep katello-ca-consumer | xargs rpm -e |sort', system) runcmd( - 'rpm -ihv http://{}/pub/katello-ca-consumer-latest.noarch.rpm'.format( - settings.server.hostname - ), + f'rpm -ihv http://{settings.server.hostname}/pub/katello-ca-consumer-latest.noarch.rpm', system, ) cmd = f'subscription-manager register --org={org} --environment={env} ' if activation_key is not None: cmd += f'--activationkey={activation_key}' else: - cmd += '--username={} --password={}'.format( - settings.server.admin_username, settings.server.admin_password - ) + cmd += f'--username={settings.server.admin_username} --password={settings.server.admin_password}' ret, stdout = runcmd(cmd, system) if ret == 0 or "system has been registered" in stdout: return True @@ -170,9 +167,7 @@ def get_configure_command(config_id, org=DEFAULT_ORG): :param str org: the satellite organization name. """ username, password = Base._get_username_password() - return "hammer -u {} -p {} virt-who-config deploy --id {} --organization '{}' ".format( - username, password, config_id, org - ) + return f"hammer -u {username} -p {password} virt-who-config deploy --id {config_id} --organization '{org}' " def get_configure_file(config_id): @@ -210,7 +205,7 @@ def check_message_in_rhsm_log(message): """Check the message exist in /var/log/rhsm/rhsm.log""" wait_for( lambda: 'Host-to-guest mapping being sent to' in get_rhsm_log(), - timeout=10, + timeout=20, delay=2, ) logs = get_rhsm_log() @@ -230,7 +225,7 @@ def _get_hypervisor_mapping(hypervisor_type): """ wait_for( lambda: 'Host-to-guest mapping being sent to' in get_rhsm_log(), - timeout=10, + timeout=20, delay=2, ) logs = get_rhsm_log() diff --git a/scripts/config_helpers.py b/scripts/config_helpers.py index cf422588b05..bc559c5091b 100644 --- a/scripts/config_helpers.py +++ b/scripts/config_helpers.py @@ -1,4 +1,5 @@ """A series of commands to help with robottelo configuration""" + from pathlib import Path import click diff --git a/scripts/graph_entities.py b/scripts/graph_entities.py index 46038a3e3b0..bd4accfa2ed 100755 --- a/scripts/graph_entities.py +++ b/scripts/graph_entities.py @@ -7,6 +7,7 @@ command provided by the make file in the parent directory. 
""" + import inspect from nailgun import entities, entity_mixins diff --git a/scripts/hammer_command_tree.py b/scripts/hammer_command_tree.py index 55a10add78e..580447047cc 100755 --- a/scripts/hammer_command_tree.py +++ b/scripts/hammer_command_tree.py @@ -2,6 +2,7 @@ help. """ + import json from robottelo import ssh diff --git a/scripts/token_editor.py b/scripts/token_editor.py index 923aef02aa0..f492c8c3fb2 100755 --- a/scripts/token_editor.py +++ b/scripts/token_editor.py @@ -4,6 +4,7 @@ Reads Python test modules under test/foreman and edit docstring tokens' prefix from ``OLD_TOKEN_PREFIX`` to ``NEW_TOKEN_PREFIX``. """ + import glob import os import re @@ -30,6 +31,6 @@ for test_module in test_modules: with open(test_module) as handler: content = handler.read() - content = TOKEN_RE.sub(fr'{NEW_TOKEN_PREFIX}\1:', content) + content = TOKEN_RE.sub(rf'{NEW_TOKEN_PREFIX}\1:', content) with open(test_module, 'w') as handler: handler.write(content) diff --git a/scripts/tokenize_customer_scenario.py b/scripts/tokenize_customer_scenario.py index 14a0cbf75a5..f810e00c4e0 100644 --- a/scripts/tokenize_customer_scenario.py +++ b/scripts/tokenize_customer_scenario.py @@ -13,6 +13,7 @@ On robottelo root dir run: $ python scripts/tokenize_customer_scenario.py """ + import codemod from codemod import Query, regex_suggestor, run_interactive from codemod.helpers import path_filter diff --git a/scripts/validate_config.py b/scripts/validate_config.py index a01f350ae6e..04af5d7ac48 100644 --- a/scripts/validate_config.py +++ b/scripts/validate_config.py @@ -1,4 +1,5 @@ """Usage: python scripts/validate_config.py""" + from dynaconf.validator import ValidationError from robottelo.config import get_settings diff --git a/testimony.yaml b/testimony.yaml index fed691dd424..f083f58997e 100644 --- a/testimony.yaml +++ b/testimony.yaml @@ -1,6 +1,8 @@ Team: required: true BZ: {} +BlockedBy: {} +Verifies: {} CaseAutomation: casesensitive: true choices: diff --git a/tests/foreman/api/test_acs.py b/tests/foreman/api/test_acs.py index 7821db07fc5..a8b24345bc6 100644 --- a/tests/foreman/api/test_acs.py +++ b/tests/foreman/api/test_acs.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_activationkey.py b/tests/foreman/api/test_activationkey.py index ef5add381fc..36ee295ef9a 100644 --- a/tests/foreman/api/test_activationkey.py +++ b/tests/foreman/api/test_activationkey.py @@ -12,6 +12,7 @@ """ + import http from fauxfactory import gen_integer, gen_string diff --git a/tests/foreman/api/test_ansible.py b/tests/foreman/api/test_ansible.py index 515d432423f..71d47b860d2 100644 --- a/tests/foreman/api/test_ansible.py +++ b/tests/foreman/api/test_ansible.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from fauxfactory import gen_string import pytest from wait_for import wait_for @@ -51,163 +52,313 @@ def filtered_user(target_sat, module_org, module_location): return user, password -@pytest.fixture -def rex_host_in_org_and_loc(target_sat, module_org, module_location, rex_contenthost): - host = target_sat.api.Host().search(query={'search': f'name={rex_contenthost.hostname}'})[0] - target_sat.api.Host(id=host.id, organization=[module_org.id]).update(['organization']) - target_sat.api.Host(id=host.id, location=module_location.id).update(['location']) - return host - - -@pytest.mark.e2e -def test_fetch_and_sync_ansible_playbooks(target_sat): - """ - Test Ansible Playbooks api for fetching and syncing 
playbooks - - :id: 17b4e767-1494-4960-bc60-f31a0495c09f - - :customerscenario: true - - :steps: - - 1. Install ansible collection with playbooks. - 2. Try to fetch the playbooks via api. - 3. Sync the playbooks. - 4. Assert the count of playbooks fetched and synced are equal. +@pytest.mark.upgrade +class TestAnsibleCfgMgmt: + """Test class for Configuration Management with Ansible - :expectedresults: - 1. Playbooks should be fetched and synced successfully. + :CaseComponent: Ansible-ConfigurationManagement - :BZ: 2115686 """ - target_sat.execute( - "ansible-galaxy collection install -p /usr/share/ansible/collections " - "xprazak2.forklift_collection" - ) - proxy_id = target_sat.nailgun_smart_proxy.id - playbook_fetch = target_sat.api.AnsiblePlaybooks().fetch(data={'proxy_id': proxy_id}) - playbooks_count = len(playbook_fetch['results']['playbooks_names']) - playbook_sync = target_sat.api.AnsiblePlaybooks().sync(data={'proxy_id': proxy_id}) - assert playbook_sync['action'] == "Sync playbooks" - - target_sat.wait_for_tasks( - search_query=(f'id = {playbook_sync["id"]}'), - poll_timeout=100, - ) - task_details = target_sat.api.ForemanTask().search( - query={'search': f'id = {playbook_sync["id"]}'} - ) - assert task_details[0].result == 'success' - assert len(task_details[0].output['result']['created']) == playbooks_count + @pytest.mark.e2e + def test_fetch_and_sync_ansible_playbooks(self, target_sat): + """ + Test Ansible Playbooks api for fetching and syncing playbooks -@pytest.mark.e2e -@pytest.mark.no_containers -@pytest.mark.rhel_ver_match('[^6].*') -def test_positive_ansible_job_on_host( - target_sat, module_org, module_location, module_ak_with_synced_repo, rhel_contenthost -): - """ - Test successful execution of Ansible Job on host. + :id: 17b4e767-1494-4960-bc60-f31a0495c09f - :id: c8dcdc54-cb98-4b24-bff9-049a6cc36acb + :steps: + 1. Install ansible collection with playbooks. + 2. Try to fetch the playbooks via api. + 3. Sync the playbooks. + 4. Assert the count of playbooks fetched and synced are equal. - :steps: - 1. Register a content host with satellite - 2. Import a role into satellite - 3. Assign that role to a host - 4. Assert that the role was assigned to the host successfully - 5. Run the Ansible playbook associated with that role - 6. Check if the job is executed. - - :expectedresults: - 1. Host should be assigned the proper role. - 2. Job execution must be successful. + :expectedresults: + 1. Playbooks should be fetched and synced successfully. 
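# Editor's illustration, not part of the diff: the refactor above moves standalone test
# functions into marker-decorated classes. In pytest, a marker applied at class level
# (here @pytest.mark.upgrade on TestAnsibleCfgMgmt) cascades to every test method, and
# the class docstring now carries the shared :CaseComponent: token once instead of it
# being repeated per test. Minimal self-contained example of that behaviour:
import pytest


@pytest.mark.upgrade
class TestMarkerCascade:
    """Shared docstring fields (e.g. :CaseComponent:) live once at class scope."""

    def test_first(self):
        assert True

    def test_second(self):
        assert True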
- :BZ: 2164400 + :BZ: 2115686 - :CaseComponent: Ansible-RemoteExecution - """ - SELECTED_ROLE = 'RedHatInsights.insights-client' - if rhel_contenthost.os_version.major <= 7: - rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) - assert rhel_contenthost.execute('yum install -y insights-client').status == 0 - result = rhel_contenthost.register( - module_org, module_location, module_ak_with_synced_repo.name, target_sat - ) - assert result.status == 0, f'Failed to register host: {result.stderr}' - proxy_id = target_sat.nailgun_smart_proxy.id - target_host = rhel_contenthost.nailgun_host - target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) - role_id = target_sat.api.AnsibleRoles().search(query={'search': f'name={SELECTED_ROLE}'})[0].id - target_sat.api.Host(id=target_host.id).add_ansible_role(data={'ansible_role_id': role_id}) - host_roles = target_host.list_ansible_roles() - assert host_roles[0]['name'] == SELECTED_ROLE - assert target_host.name == rhel_contenthost.hostname - - template_id = ( - target_sat.api.JobTemplate() - .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] - .id - ) - job = target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'targeting_type': 'static_query', - 'search_query': f'name = {rhel_contenthost.hostname}', - }, - ) - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 - ) - result = target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 1 - target_sat.api.Host(id=target_host.id).remove_ansible_role(data={'ansible_role_id': role_id}) - host_roles = target_host.list_ansible_roles() - assert len(host_roles) == 0 + :customerscenario: true + """ + target_sat.execute( + "ansible-galaxy collection install -p /usr/share/ansible/collections " + "xprazak2.forklift_collection" + ) + proxy_id = target_sat.nailgun_smart_proxy.id + playbook_fetch = target_sat.api.AnsiblePlaybooks().fetch(data={'proxy_id': proxy_id}) + playbooks_count = len(playbook_fetch['results']['playbooks_names']) + playbook_sync = target_sat.api.AnsiblePlaybooks().sync(data={'proxy_id': proxy_id}) + assert playbook_sync['action'] == "Sync playbooks" + + target_sat.wait_for_tasks( + search_query=(f'id = {playbook_sync["id"]}'), + poll_timeout=100, + ) + task_details = target_sat.api.ForemanTask().search( + query={'search': f'id = {playbook_sync["id"]}'} + ) + assert task_details[0].result == 'success' + assert len(task_details[0].output['result']['created']) == playbooks_count + + @pytest.mark.e2e + @pytest.mark.tier2 + def test_add_and_remove_ansible_role_hostgroup(self, target_sat): + """ + Test add and remove functionality for ansible roles in hostgroup via API + + :id: 7672cf86-fa31-11ed-855a-0fd307d2d66b + + :steps: + 1. Create a hostgroup and a nested hostgroup + 2. Sync a few ansible roles + 3. Assign a few ansible roles with the host group + 4. Add some ansible role with the host group + 5. Add some ansible roles to the nested hostgroup + 6. Remove the added ansible roles from the parent and nested hostgroup + + :expectedresults: + 1. 
Ansible role assign/add/remove functionality should work as expected in API + + :BZ: 2164400 + """ + ROLE_NAMES = [ + 'theforeman.foreman_scap_client', + 'redhat.satellite.hostgroups', + 'RedHatInsights.insights-client', + 'redhat.satellite.compute_resources', + ] + hg = target_sat.api.HostGroup(name=gen_string('alpha')).create() + hg_nested = target_sat.api.HostGroup(name=gen_string('alpha'), parent=hg).create() + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': ROLE_NAMES}) + ROLES = [ + target_sat.api.AnsibleRoles().search(query={'search': f'name={role}'})[0].id + for role in ROLE_NAMES + ] + # Assign first 2 roles to HG and verify it + target_sat.api.HostGroup(id=hg.id).assign_ansible_roles( + data={'ansible_role_ids': ROLES[:2]} + ) + for r1, r2 in zip( + target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:2], strict=True + ): + assert r1['name'] == r2 + + # Add next role from list to HG and verify it + target_sat.api.HostGroup(id=hg.id).add_ansible_role(data={'ansible_role_id': ROLES[2]}) + for r1, r2 in zip( + target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:3], strict=True + ): + assert r1['name'] == r2 + + # Add next role to nested HG, and verify roles are also nested to HG along with assigned role + # Also, ensure the parent HG does not contain the roles assigned to nested HGs + target_sat.api.HostGroup(id=hg_nested.id).add_ansible_role( + data={'ansible_role_id': ROLES[3]} + ) + for r1, r2 in zip( + target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles(), + [ROLE_NAMES[-1]] + ROLE_NAMES[:-1], + strict=True, + ): + assert r1['name'] == r2 + + for r1, r2 in zip( + target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:3], strict=True + ): + assert r1['name'] == r2 + + # Remove roles assigned one by one from HG and nested HG + for role in ROLES[:3]: + target_sat.api.HostGroup(id=hg.id).remove_ansible_role(data={'ansible_role_id': role}) + hg_roles = target_sat.api.HostGroup(id=hg.id).list_ansible_roles() + assert len(hg_roles) == 0 + + for role in ROLES: + target_sat.api.HostGroup(id=hg_nested.id).remove_ansible_role( + data={'ansible_role_id': role} + ) + hg_nested_roles = target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles() + assert len(hg_nested_roles) == 0 + + @pytest.mark.e2e + @pytest.mark.tier2 + def test_positive_ansible_roles_inherited_from_hostgroup( + self, request, target_sat, module_org, module_location + ): + """Verify ansible roles inheritance functionality for host with parent/nested hostgroup via API + :id: 7672cf86-fa31-11ed-855a-0fd307d2d66g -@pytest.mark.no_containers -def test_positive_ansible_job_on_multiple_host( - target_sat, - module_org, - rhel9_contenthost, - rhel8_contenthost, - rhel7_contenthost, - module_location, - module_ak_with_synced_repo, -): - """Test execution of Ansible job on multiple hosts simultaneously. + :steps: + 1. Create a host, hostgroup and nested hostgroup + 2. Sync a few ansible roles + 3. Assign a few ansible roles to the host, hostgroup, nested hostgroup and verify it. + 4. Update host to be in parent/nested hostgroup and verify roles assigned - :id: 9369feef-466c-40d3-9d0d-65520d7f21ef + :expectedresults: + 1. Hosts in parent/nested hostgroups must have direct and indirect roles correctly assigned. - :customerscenario: true + :BZ: 2187967 - :steps: - 1. Register multiple content hosts with satellite - 2. Import a role into satellite - 3. Assign that role to all host - 4. 
Trigger ansible job keeping all host in a single query - 5. Check the passing and failing of individual hosts - 6. Check if one of the job on a host is failed resulting into whole job is marked as failed. + :customerscenario: true + """ + ROLE_NAMES = [ + 'theforeman.foreman_scap_client', + 'RedHatInsights.insights-client', + 'redhat.satellite.compute_resources', + ] + proxy_id = target_sat.nailgun_smart_proxy.id + host = target_sat.api.Host(organization=module_org, location=module_location).create() + hg = target_sat.api.HostGroup(name=gen_string('alpha'), organization=[module_org]).create() + hg_nested = target_sat.api.HostGroup( + name=gen_string('alpha'), parent=hg, organization=[module_org] + ).create() + + @request.addfinalizer + def _finalize(): + host.delete() + hg_nested.delete() + hg.delete() + + target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': ROLE_NAMES}) + ROLES = [ + target_sat.api.AnsibleRoles().search(query={'search': f'name={role}'})[0].id + for role in ROLE_NAMES + ] + + # Assign roles to Host/Hostgroup/Nested Hostgroup and verify it + target_sat.api.Host(id=host.id).add_ansible_role(data={'ansible_role_id': ROLES[0]}) + assert ROLE_NAMES[0] == target_sat.api.Host(id=host.id).list_ansible_roles()[0]['name'] + + target_sat.api.HostGroup(id=hg.id).add_ansible_role(data={'ansible_role_id': ROLES[1]}) + assert ROLE_NAMES[1] == target_sat.api.HostGroup(id=hg.id).list_ansible_roles()[0]['name'] + + target_sat.api.HostGroup(id=hg_nested.id).add_ansible_role( + data={'ansible_role_id': ROLES[2]} + ) + listroles = target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles() + assert ROLE_NAMES[2] == listroles[0]['name'] + assert listroles[0]['directly_assigned'] + assert ROLE_NAMES[1] == listroles[1]['name'] + assert not listroles[1]['directly_assigned'] + + # Update host to be in nested hostgroup and verify roles assigned + host.hostgroup = hg_nested + host = host.update(['hostgroup']) + listroles_host = target_sat.api.Host(id=host.id).list_ansible_roles() + assert ROLE_NAMES[0] == listroles_host[0]['name'] + assert listroles_host[0]['directly_assigned'] + assert ROLE_NAMES[1] == listroles_host[1]['name'] + assert not listroles_host[1]['directly_assigned'] + assert ROLE_NAMES[2] == listroles_host[2]['name'] + assert not listroles_host[1]['directly_assigned'] + # Verify nested hostgroup doesn't contains the roles assigned to host + listroles_nested_hg = target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles() + assert ROLE_NAMES[0] not in [role['name'] for role in listroles_nested_hg] + assert ROLE_NAMES[2] == listroles_nested_hg[0]['name'] + assert ROLE_NAMES[1] == listroles_nested_hg[1]['name'] + + # Update host to be in parent hostgroup and verify roles assigned + host.hostgroup = hg + host = host.update(['hostgroup']) + listroles = target_sat.api.Host(id=host.id).list_ansible_roles() + assert ROLE_NAMES[0] == listroles[0]['name'] + assert listroles[0]['directly_assigned'] + assert ROLE_NAMES[1] == listroles[1]['name'] + assert not listroles[1]['directly_assigned'] + # Verify parent hostgroup doesn't contains the roles assigned to host + listroles_hg = target_sat.api.HostGroup(id=hg.id).list_ansible_roles() + assert ROLE_NAMES[0] not in [role['name'] for role in listroles_hg] + assert ROLE_NAMES[1] == listroles_hg[0]['name'] + + @pytest.mark.rhel_ver_match('[78]') + @pytest.mark.tier2 + def test_positive_read_facts_with_filter( + self, target_sat, rex_contenthost, filtered_user, module_org, module_location + ): + """Read host's Ansible facts 
as a user with a role that has host filter + + :id: 483d5faf-7a4c-4cb7-b14f-369768ad99b0 + + :steps: + 1. Run Ansible roles on a host + 2. Using API, read Ansible facts of that host + + :expectedresults: Ansible facts returned + + :BZ: 1699188 + + :customerscenario: true + """ + user, password = filtered_user + host = rex_contenthost.nailgun_host + host.organization = module_org + host.location = module_location + host.update(['organization', 'location']) + + # gather ansible facts by running ansible roles on the host + host.play_ansible_roles() + if is_open('BZ:2216471'): + wait_for( + lambda: len(rex_contenthost.nailgun_host.get_facts()) > 0, + timeout=30, + delay=2, + ) + user_cfg = user_nailgun_config(user.login, password) + # get facts through API + user_facts = ( + target_sat.api.Host(server_config=user_cfg) + .search(query={'search': f'name={rex_contenthost.hostname}'})[0] + .get_facts() + ) + assert 'subtotal' in user_facts + assert user_facts['subtotal'] == 1 + assert 'results' in user_facts + assert rex_contenthost.hostname in user_facts['results'] + assert len(user_facts['results'][rex_contenthost.hostname]) > 0 - :expectedresults: - 1. One of the jobs failing on a single host must impact the overall result as failed. - :BZ: 2167396, 2190464, 2184117 +class TestAnsibleREX: + """Test class for remote execution via Ansible :CaseComponent: Ansible-RemoteExecution """ - hosts = [rhel9_contenthost, rhel8_contenthost, rhel7_contenthost] - SELECTED_ROLE = 'RedHatInsights.insights-client' - for host in hosts: - result = host.register( + + @pytest.mark.e2e + @pytest.mark.no_containers + @pytest.mark.rhel_ver_match('[^6].*') + def test_positive_ansible_job_on_host( + self, target_sat, module_org, module_location, module_ak_with_synced_repo, rhel_contenthost + ): + """Test successful execution of Ansible Job on host. + + :id: c8dcdc54-cb98-4b24-bff9-049a6cc36acb + + :steps: + 1. Register a content host with satellite + 2. Import a role into satellite + 3. Assign that role to a host + 4. Assert that the role was assigned to the host successfully + 5. Run the Ansible playbook associated with that role + 6. Check if the job is executed. + + :expectedresults: + 1. Host should be assigned the proper role. + 2. Job execution must be successful. 
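# Editor's sketch, not part of the changeset: the filtered-user pattern used by
# test_positive_read_facts_with_filter above. A nailgun ServerConfig built from the
# limited user's credentials (the test obtains it via robottelo's user_nailgun_config)
# is passed as server_config, so the Host search and get_facts() run under that user's
# role filter. `sat`, `user_cfg` and `hostname` are placeholders supplied by the caller.
def read_facts_as_user(sat, user_cfg, hostname):
    host = sat.api.Host(server_config=user_cfg).search(
        query={'search': f'name={hostname}'}
    )[0]
    facts = host.get_facts()
    assert facts.get('subtotal') == 1, f'Expected exactly one host to match {hostname}'
    return facts['results'][hostname]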
+ + :BZ: 2164400 + """ + SELECTED_ROLE = 'RedHatInsights.insights-client' + if rhel_contenthost.os_version.major <= 7: + rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) + assert rhel_contenthost.execute('yum install -y insights-client').status == 0 + result = rhel_contenthost.register( module_org, module_location, module_ak_with_synced_repo.name, target_sat ) assert result.status == 0, f'Failed to register host: {result.stderr}' proxy_id = target_sat.nailgun_smart_proxy.id - target_host = host.nailgun_host + target_host = rhel_contenthost.nailgun_host target_sat.api.AnsibleRoles().sync( data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} ) @@ -217,153 +368,105 @@ def test_positive_ansible_job_on_multiple_host( target_sat.api.Host(id=target_host.id).add_ansible_role(data={'ansible_role_id': role_id}) host_roles = target_host.list_ansible_roles() assert host_roles[0]['name'] == SELECTED_ROLE + assert target_host.name == rhel_contenthost.hostname - template_id = ( - target_sat.api.JobTemplate() - .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] - .id - ) - job = target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'targeting_type': 'static_query', - 'search_query': f'name ^ ({hosts[0].hostname} && {hosts[1].hostname} ' - f'&& {hosts[2].hostname})', - }, - ) - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', - poll_timeout=1000, - must_succeed=False, - ) - result = target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 2 # SELECTED_ROLE working on rhel8/rhel9 clients - assert result.failed == 1 # SELECTED_ROLE failing on rhel7 client - assert result.status_label == 'failed' - - -@pytest.mark.e2e -@pytest.mark.tier2 -@pytest.mark.upgrade -def test_add_and_remove_ansible_role_hostgroup(target_sat): - """ - Test add and remove functionality for ansible roles in hostgroup via API - - :id: 7672cf86-fa31-11ed-855a-0fd307d2d66b - - :steps: - 1. Create a hostgroup and a nested hostgroup - 2. Sync a few ansible roles - 3. Assign a few ansible roles with the host group - 4. Add some ansible role with the host group - 5. Add some ansible roles to the nested hostgroup - 6. Remove the added ansible roles from the parent and nested hostgroup - - :expectedresults: - 1. 
Ansible role assign/add/remove functionality should work as expected in API - - :BZ: 2164400 - """ - ROLE_NAMES = [ - 'theforeman.foreman_scap_client', - 'redhat.satellite.hostgroups', - 'RedHatInsights.insights-client', - 'redhat.satellite.compute_resources', - ] - hg = target_sat.api.HostGroup(name=gen_string('alpha')).create() - hg_nested = target_sat.api.HostGroup(name=gen_string('alpha'), parent=hg).create() - proxy_id = target_sat.nailgun_smart_proxy.id - target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': ROLE_NAMES}) - ROLES = [ - target_sat.api.AnsibleRoles().search(query={'search': f'name={role}'})[0].id - for role in ROLE_NAMES - ] - # Assign first 2 roles to HG and verify it - target_sat.api.HostGroup(id=hg.id).assign_ansible_roles(data={'ansible_role_ids': ROLES[:2]}) - for r1, r2 in zip( - target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:2], strict=True - ): - assert r1['name'] == r2 - - # Add next role from list to HG and verify it - target_sat.api.HostGroup(id=hg.id).add_ansible_role(data={'ansible_role_id': ROLES[2]}) - for r1, r2 in zip( - target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:3], strict=True - ): - assert r1['name'] == r2 - - # Add next role to nested HG, and verify roles are also nested to HG along with assigned role - # Also, ensure the parent HG does not contain the roles assigned to nested HGs - target_sat.api.HostGroup(id=hg_nested.id).add_ansible_role(data={'ansible_role_id': ROLES[3]}) - for r1, r2 in zip( - target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles(), - [ROLE_NAMES[-1]] + ROLE_NAMES[:-1], - strict=True, - ): - assert r1['name'] == r2 - - for r1, r2 in zip( - target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:3], strict=True + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id + ) + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {rhel_contenthost.hostname}', + }, + ) + target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 + ) + result = target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 1 + target_sat.api.Host(id=target_host.id).remove_ansible_role( + data={'ansible_role_id': role_id} + ) + host_roles = target_host.list_ansible_roles() + assert len(host_roles) == 0 + + @pytest.mark.no_containers + def test_positive_ansible_job_on_multiple_host( + self, + target_sat, + module_org, + rhel9_contenthost, + rhel8_contenthost, + rhel7_contenthost, + module_location, + module_ak_with_synced_repo, ): - assert r1['name'] == r2 - - # Remove roles assigned one by one from HG and nested HG - for role in ROLES[:3]: - target_sat.api.HostGroup(id=hg.id).remove_ansible_role(data={'ansible_role_id': role}) - hg_roles = target_sat.api.HostGroup(id=hg.id).list_ansible_roles() - assert len(hg_roles) == 0 - - for role in ROLES: - target_sat.api.HostGroup(id=hg_nested.id).remove_ansible_role( - data={'ansible_role_id': role} + """Test execution of Ansible job on multiple hosts simultaneously. + + :id: 9369feef-466c-40d3-9d0d-65520d7f21ef + + :customerscenario: true + + :steps: + 1. Register multiple content hosts with satellite + 2. Import a role into satellite + 3. Assign that role to all host + 4. Trigger ansible job keeping all host in a single query + 5. 
Check the passing and failing of individual hosts + 6. Check if one of the job on a host is failed resulting into whole job is marked as failed. + + :expectedresults: + 1. One of the jobs failing on a single host must impact the overall result as failed. + + :BZ: 2167396, 2190464, 2184117 + """ + hosts = [rhel9_contenthost, rhel8_contenthost, rhel7_contenthost] + SELECTED_ROLE = 'RedHatInsights.insights-client' + for host in hosts: + result = host.register( + module_org, module_location, module_ak_with_synced_repo.name, target_sat + ) + assert result.status == 0, f'Failed to register host: {result.stderr}' + proxy_id = target_sat.nailgun_smart_proxy.id + target_host = host.nailgun_host + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + role_id = ( + target_sat.api.AnsibleRoles() + .search(query={'search': f'name={SELECTED_ROLE}'})[0] + .id + ) + target_sat.api.Host(id=target_host.id).add_ansible_role( + data={'ansible_role_id': role_id} + ) + host_roles = target_host.list_ansible_roles() + assert host_roles[0]['name'] == SELECTED_ROLE + + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id ) - hg_nested_roles = target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles() - assert len(hg_nested_roles) == 0 - - -@pytest.mark.rhel_ver_match('[78]') -@pytest.mark.tier2 -def test_positive_read_facts_with_filter( - target_sat, rex_contenthost, filtered_user, rex_host_in_org_and_loc -): - """ - Read host's Ansible facts as a user with a role that has host filter - - :id: 483d5faf-7a4c-4cb7-b14f-369768ad99b0 - - 1. Run Ansible roles on a host - 2. Using API, read Ansible facts of that host - - :expectedresults: Ansible facts returned - - :BZ: 1699188 - - :customerscenario: true - """ - user, password = filtered_user - host = rex_host_in_org_and_loc - - # gather ansible facts by running ansible roles on the host - host.play_ansible_roles() - if is_open('BZ:2216471'): - host_wait = target_sat.api.Host().search( - query={'search': f'name={rex_contenthost.hostname}'} - )[0] - wait_for( - lambda: len(host_wait.get_facts()) > 0, - timeout=30, - delay=2, + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name ^ ({hosts[0].hostname} && {hosts[1].hostname} ' + f'&& {hosts[2].hostname})', + }, ) - - user_cfg = user_nailgun_config(user.login, password) - host = target_sat.api.Host(server_config=user_cfg).search( - query={'search': f'name={rex_contenthost.hostname}'} - )[0] - # get facts through API - facts = host.get_facts() - assert 'subtotal' in facts - assert facts['subtotal'] == 1 - assert 'results' in facts - assert rex_contenthost.hostname in facts['results'] - assert len(facts['results'][rex_contenthost.hostname]) > 0 + target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', + poll_timeout=1000, + must_succeed=False, + ) + result = target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 2 # SELECTED_ROLE working on rhel8/rhel9 clients + assert result.failed == 1 # SELECTED_ROLE failing on rhel7 client + assert result.status_label == 'failed' diff --git a/tests/foreman/api/test_architecture.py b/tests/foreman/api/test_architecture.py index 362abbfae32..b40173fe21f 100644 --- a/tests/foreman/api/test_architecture.py +++ b/tests/foreman/api/test_architecture.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + 
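# Editor's sketch, not part of the diff: how test_positive_ansible_job_on_multiple_host
# above assembles its static-query target. The three client hostnames are joined with
# '&&' inside a 'name ^ (...)' search string so one job invocation covers all of them;
# the succeeded/failed counters on the JobInvocation result are then compared against
# the per-host expectations. The helper and hostnames below are hypothetical.
def build_host_set_query(hostnames):
    """Return a search string targeting every hostname in one job invocation."""
    return f"name ^ ({' && '.join(hostnames)})"


assert build_host_set_query(['a.example.com', 'b.example.com', 'c.example.com']) == (
    'name ^ (a.example.com && b.example.com && c.example.com)'
)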
from fauxfactory import gen_choice import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_audit.py b/tests/foreman/api/test_audit.py index a90eda83884..430657d9c8e 100644 --- a/tests/foreman/api/test_audit.py +++ b/tests/foreman/api/test_audit.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.utils.datafactory import gen_string diff --git a/tests/foreman/api/test_bookmarks.py b/tests/foreman/api/test_bookmarks.py index b51ac194cb3..9b532a48155 100644 --- a/tests/foreman/api/test_bookmarks.py +++ b/tests/foreman/api/test_bookmarks.py @@ -11,17 +11,18 @@ :CaseImportance: High """ + import random from fauxfactory import gen_string import pytest from requests.exceptions import HTTPError -from robottelo.constants import BOOKMARK_ENTITIES +from robottelo.constants import BOOKMARK_ENTITIES_SELECTION from robottelo.utils.datafactory import invalid_values_list, valid_data_list # List of unique bookmark controller values, preserving order -CONTROLLERS = list(dict.fromkeys(entity['controller'] for entity in BOOKMARK_ENTITIES)) +CONTROLLERS = list(dict.fromkeys(entity['controller'] for entity in BOOKMARK_ENTITIES_SELECTION)) @pytest.mark.tier1 diff --git a/tests/foreman/api/test_capsule.py b/tests/foreman/api/test_capsule.py index 86aaa089922..5600cc040a8 100644 --- a/tests/foreman/api/test_capsule.py +++ b/tests/foreman/api/test_capsule.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from fauxfactory import gen_string, gen_url import pytest from requests import HTTPError @@ -46,7 +47,8 @@ def test_positive_update_capsule(request, pytestconfig, target_sat, module_capsu # refresh features features = capsule.refresh() - module_capsule_configured.run_installer_arg('enable-foreman-proxy-plugin-openscap') + result = module_capsule_configured.install(cmd_args=['enable-foreman-proxy-plugin-openscap']) + assert result.status == 0, 'Installer failed when enabling OpenSCAP plugin.' 
features_new = capsule.refresh() assert len(features_new["features"]) == len(features["features"]) + 1 assert 'Openscap' in [feature["name"] for feature in features_new["features"]] diff --git a/tests/foreman/api/test_classparameters.py b/tests/foreman/api/test_classparameters.py index 931a11edec2..d74e708bd61 100644 --- a/tests/foreman/api/test_classparameters.py +++ b/tests/foreman/api/test_classparameters.py @@ -11,6 +11,7 @@ :Team: Rocket """ + import json from random import choice diff --git a/tests/foreman/api/test_computeprofile.py b/tests/foreman/api/test_computeprofile.py index 6d374bc8b34..785278ee2b6 100644 --- a/tests/foreman/api/test_computeprofile.py +++ b/tests/foreman/api/test_computeprofile.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_computeresource_azurerm.py b/tests/foreman/api/test_computeresource_azurerm.py index 47bf8313066..d8751f65786 100644 --- a/tests/foreman/api/test_computeresource_azurerm.py +++ b/tests/foreman/api/test_computeresource_azurerm.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/api/test_computeresource_gce.py b/tests/foreman/api/test_computeresource_gce.py index 2d088f4f2a2..8f27e9b98fe 100644 --- a/tests/foreman/api/test_computeresource_gce.py +++ b/tests/foreman/api/test_computeresource_gce.py @@ -14,6 +14,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_string diff --git a/tests/foreman/api/test_computeresource_libvirt.py b/tests/foreman/api/test_computeresource_libvirt.py index ad95fb9f202..ce0c0ff0b9b 100644 --- a/tests/foreman/api/test_computeresource_libvirt.py +++ b/tests/foreman/api/test_computeresource_libvirt.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_contentcredentials.py b/tests/foreman/api/test_contentcredentials.py index 3f088ae6253..2263edca0d1 100644 --- a/tests/foreman/api/test_contentcredentials.py +++ b/tests/foreman/api/test_contentcredentials.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from copy import copy from fauxfactory import gen_string diff --git a/tests/foreman/api/test_contentview.py b/tests/foreman/api/test_contentview.py index 03c95999417..9347cce882e 100644 --- a/tests/foreman/api/test_contentview.py +++ b/tests/foreman/api/test_contentview.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_integer, gen_string, gen_utf8 @@ -21,6 +22,7 @@ from robottelo.constants import ( CONTAINER_REGISTRY_HUB, CUSTOM_RPM_SHA_512_FEED_COUNT, + DEFAULT_ARCHITECTURE, FILTER_ERRATA_TYPE, PERMISSIONS, PRDS, @@ -278,6 +280,63 @@ def test_positive_add_sha512_rpm(self, content_view, module_org, module_target_s content_view_version.errata_counts['total'] == CUSTOM_RPM_SHA_512_FEED_COUNT['errata'] ) + @pytest.mark.tier2 + def test_ccv_promote_registry_name_change(self, module_target_sat, module_sca_manifest_org): + """Testing CCV promotion scenarios where the registry_name has been changed to some + specific value. + + :id: 41641d4a-d144-4833-869a-284624df2410 + + :steps: + + 1) Sync a RH Repo + 2) Create a CV, add the repo and publish it + 3) Create a CCV and add the CV version to it, then publish it + 4) Create LCEs with the specific value for registry_name + 5) Promote the CCV to both LCEs + + :expectedresults: CCV can be promoted to both LCEs without issue. 
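# Editor's illustration only, not Katello code: what the two registry_name_pattern
# values used by test_ccv_promote_registry_name_change above roughly evaluate to.
# Katello renders these ERB snippets server-side when naming container repositories;
# the toy substitution below just shows the intended shape for a repository called
# 'my-repo' promoted to a lifecycle environment labelled 'dev'.
def render_pattern(pattern, repository_name, lce_label):
    return pattern.replace('<%= repository.name %>', repository_name).replace(
        '<%= lifecycle_environment.label %>', lce_label
    )


assert render_pattern('<%= repository.name %>', 'my-repo', 'dev') == 'my-repo'
assert (
    render_pattern(
        '<%= lifecycle_environment.label %>/<%= repository.name %>', 'my-repo', 'dev'
    )
    == 'dev/my-repo'
)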
+ + :CaseImportance: High + + :customerscenario: true + + :BZ: 2153523 + """ + rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=REPOS['kickstart']['rhel8_aps']['product'], + repo=REPOS['kickstart']['rhel8_aps']['name'], + reposet=REPOS['kickstart']['rhel8_aps']['reposet'], + releasever=REPOS['kickstart']['rhel8_aps']['version'], + ) + repo = module_target_sat.api.Repository(id=rh_repo_id).read() + repo.sync(timeout=600) + cv = module_target_sat.api.ContentView(organization=module_sca_manifest_org).create() + cv = module_target_sat.api.ContentView(id=cv.id, repository=[repo]).update(["repository"]) + cv.publish() + cv = cv.read() + composite_cv = module_target_sat.api.ContentView( + organization=module_sca_manifest_org, composite=True + ).create() + composite_cv.component = [cv.version[0]] + composite_cv = composite_cv.update(['component']) + composite_cv.publish() + composite_cv = composite_cv.read() + # Create LCEs with the specific registry value + lce1 = module_target_sat.api.LifecycleEnvironment( + organization=module_sca_manifest_org, + registry_name_pattern='<%= repository.name %>', + ).create() + lce2 = module_target_sat.api.LifecycleEnvironment( + organization=module_sca_manifest_org, + registry_name_pattern='<%= lifecycle_environment.label %>/<%= repository.name %>', + ).create() + version = composite_cv.version[0].read() + assert 'success' in version.promote(data={'environment_ids': lce1.id})['result'] + assert 'success' in version.promote(data={'environment_ids': lce2.id})['result'] + class TestContentViewCreate: """Create tests for content views.""" diff --git a/tests/foreman/api/test_contentviewfilter.py b/tests/foreman/api/test_contentviewfilter.py index 6360bf5da16..0516582ed5e 100644 --- a/tests/foreman/api/test_contentviewfilter.py +++ b/tests/foreman/api/test_contentviewfilter.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + import http from random import randint diff --git a/tests/foreman/api/test_contentviewversion.py b/tests/foreman/api/test_contentviewversion.py index 5f3414456e3..b870ffb19f7 100644 --- a/tests/foreman/api/test_contentviewversion.py +++ b/tests/foreman/api/test_contentviewversion.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_convert2rhel.py b/tests/foreman/api/test_convert2rhel.py index 37bbfa30058..580c5620d55 100644 --- a/tests/foreman/api/test_convert2rhel.py +++ b/tests/foreman/api/test_convert2rhel.py @@ -11,6 +11,7 @@ :Team: Rocket """ + import pytest import requests diff --git a/tests/foreman/api/test_discoveredhost.py b/tests/foreman/api/test_discoveredhost.py index 59783b9a8b6..31ad0b1d415 100644 --- a/tests/foreman/api/test_discoveredhost.py +++ b/tests/foreman/api/test_discoveredhost.py @@ -9,6 +9,7 @@ :CaseAutomation: Automated """ + import re from fauxfactory import gen_choice, gen_ipaddr, gen_mac, gen_string @@ -313,7 +314,7 @@ def test_positive_auto_provision_all( :expectedresults: All discovered hosts should be auto-provisioned successfully - :CaseAutomation: NotAutomated + :CaseAutomation: Automated :CaseImportance: High """ diff --git a/tests/foreman/api/test_discoveryrule.py b/tests/foreman/api/test_discoveryrule.py index 3d1c1f4bf48..2ffddb51711 100644 --- a/tests/foreman/api/test_discoveryrule.py +++ b/tests/foreman/api/test_discoveryrule.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory 
import gen_choice, gen_integer, gen_string import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_docker.py b/tests/foreman/api/test_docker.py index eab9d213db5..6bebe5e81f1 100644 --- a/tests/foreman/api/test_docker.py +++ b/tests/foreman/api/test_docker.py @@ -7,6 +7,7 @@ :CaseImportance: High """ + from random import choice, randint, shuffle from fauxfactory import gen_string, gen_url diff --git a/tests/foreman/api/test_environment.py b/tests/foreman/api/test_environment.py index 02f7a3928e3..d0e62dbe6a9 100644 --- a/tests/foreman/api/test_environment.py +++ b/tests/foreman/api/test_environment.py @@ -15,6 +15,7 @@ :CaseImportance: Critical """ + from fauxfactory import gen_string import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index cb831425778..0bbfda852b8 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -11,15 +11,30 @@ :CaseImportance: High """ + # For ease of use hc refers to host-collection throughout this document -from time import sleep +from time import sleep, time -from nailgun import entities import pytest -from robottelo import constants from robottelo.config import settings -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + FAKE_1_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE_NAME, + FAKE_4_CUSTOM_PACKAGE, + FAKE_4_CUSTOM_PACKAGE_NAME, + FAKE_5_CUSTOM_PACKAGE, + FAKE_9_YUM_OUTDATED_PACKAGES, + FAKE_9_YUM_SECURITY_ERRATUM, + FAKE_9_YUM_UPDATED_PACKAGES, + PRDS, + REAL_RHEL8_1_ERRATA_ID, + REAL_RHEL8_1_PACKAGE_FILENAME, + REPOS, + REPOSET, +) pytestmark = [ pytest.mark.run_in_one_thread, @@ -30,25 +45,53 @@ CUSTOM_REPO_URL = settings.repos.yum_9.url CUSTOM_REPO_ERRATA_ID = settings.repos.yum_6.errata[2] +ERRATA = [ + { + 'id': settings.repos.yum_6.errata[2], # security advisory + 'old_package': FAKE_1_CUSTOM_PACKAGE, + 'new_package': FAKE_2_CUSTOM_PACKAGE, + 'package_name': FAKE_2_CUSTOM_PACKAGE_NAME, + }, + { + 'id': settings.repos.yum_6.errata[0], # bugfix advisory + 'old_package': FAKE_4_CUSTOM_PACKAGE, + 'new_package': FAKE_5_CUSTOM_PACKAGE, + 'package_name': FAKE_4_CUSTOM_PACKAGE_NAME, + }, +] +REPO_WITH_ERRATA = { + 'url': settings.repos.yum_9.url, + 'errata': ERRATA, + 'errata_ids': settings.repos.yum_9.errata, +} @pytest.fixture(scope='module') -def activation_key(module_org, module_lce, module_target_sat): +def activation_key(module_sca_manifest_org, module_cv, module_lce, module_target_sat): + """A new Activation Key associated with published version + of module_cv, promoted to module_lce.""" + _cv = cv_publish_promote( + module_target_sat, + module_sca_manifest_org, + module_cv, + module_lce, + )['content-view'] return module_target_sat.api.ActivationKey( - environment=module_lce, organization=module_org + organization=module_sca_manifest_org, + environment=module_lce, + content_view=_cv, ).create() @pytest.fixture(scope='module') -def rh_repo( - module_entitlement_manifest_org, module_lce, module_cv, activation_key, module_target_sat -): +def rh_repo(module_sca_manifest_org, module_lce, module_cv, activation_key, module_target_sat): + "rhel8 rh repos with errata and outdated/updated packages" return module_target_sat.cli_factory.setup_org_for_a_rh_repo( { - 'product': constants.PRDS['rhel'], - 'repository-set': constants.REPOSET['rhst7'], - 'repository': constants.REPOS['rhst7']['name'], - 'organization-id': 
module_entitlement_manifest_org.id, + 'product': PRDS['rhel'], + 'repository-set': REPOSET['rhst8'], + 'repository': REPOS['rhst8']['name'], + 'organization-id': module_sca_manifest_org.id, 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'activationkey-id': activation_key.id, @@ -57,11 +100,12 @@ def rh_repo( @pytest.fixture(scope='module') -def custom_repo(module_org, module_lce, module_cv, activation_key, module_target_sat): +def custom_repo(module_sca_manifest_org, module_lce, module_cv, activation_key, module_target_sat): + "zoo repos with errata and outdated/updated packages" return module_target_sat.cli_factory.setup_org_for_a_custom_repo( { - 'url': settings.repos.yum_9.url, - 'organization-id': module_org.id, + 'url': CUSTOM_REPO_URL, + 'organization-id': module_sca_manifest_org.id, 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'activationkey-id': activation_key.id, @@ -69,54 +113,7 @@ def custom_repo(module_org, module_lce, module_cv, activation_key, module_target ) -def _install_package( - module_org, clients, host_ids, package_name, via_ssh=True, rpm_package_name=None -): - """Install package via SSH CLI if via_ssh is True, otherwise - install via http api: PUT /api/v2/hosts/bulk/install_content - """ - if via_ssh: - for client in clients: - result = client.run(f'yum install -y {package_name}') - assert result.status == 0 - result = client.run(f'rpm -q {package_name}') - assert result.status == 0 - else: - entities.Host().install_content( - data={ - 'organization_id': module_org.id, - 'included': {'ids': host_ids}, - 'content_type': 'package', - 'content': [package_name], - } - ) - _validate_package_installed(clients, rpm_package_name) - - -def _validate_package_installed(hosts, package_name, expected_installed=True, timeout=240): - """Check whether package was installed on the list of hosts.""" - for host in hosts: - for _ in range(timeout // 15): - result = host.run(f'rpm -q {package_name}') - if ( - result.status == 0 - and expected_installed - or result.status != 0 - and not expected_installed - ): - break - sleep(15) - else: - pytest.fail( - 'Package {} was not {} host {}'.format( - package_name, - 'installed on' if expected_installed else 'removed from', - host.hostname, - ) - ) - - -def _validate_errata_counts(module_org, host, errata_type, expected_value, timeout=120): +def _validate_errata_counts(host, errata_type, expected_value, timeout=120): """Check whether host contains expected errata counts.""" for _ in range(timeout // 5): host = host.read() @@ -125,8 +122,7 @@ def _validate_errata_counts(module_org, host, errata_type, expected_value, timeo sleep(5) else: pytest.fail( - 'Host {} contains {} {} errata, but expected to contain ' - '{} of them'.format( + 'Host {} contains {} {} errata, but expected to contain {} of them'.format( host.name, host.content_facet_attributes['errata_counts'][errata_type], errata_type, @@ -135,113 +131,692 @@ def _validate_errata_counts(module_org, host, errata_type, expected_value, timeo ) -def _fetch_available_errata(module_org, host, expected_amount, timeout=120): +def _fetch_available_errata(host, expected_amount=None, timeout=120): """Fetch available errata for host.""" errata = host.errata() for _ in range(timeout // 5): - if len(errata['results']) == expected_amount: + if expected_amount is None or len(errata['results']) == expected_amount: return errata['results'] sleep(5) errata = host.errata() else: pytest.fail( - 'Host {} contains {} available errata, but expected to ' - 
'contain {} of them'.format(host.name, len(errata['results']), expected_amount) + 'Host {} contains {} available errata, but expected to contain {} of them'.format( + host.name, + len(errata['results']), + expected_amount if not None else 'No expected_amount provided', + ) + ) + + +def _fetch_available_errata_instances(sat, host, expected_amount=None, timeout=120): + """Fetch list of instances of avaliable errata for host.""" + _errata_dict = _fetch_available_errata(host.nailgun_host, expected_amount, timeout) + _errata_ids = [errata['id'] for errata in _errata_dict] + instances = [sat.api.Errata(id=_id).read() for _id in _errata_ids] + assert ( + len(instances) == len(_errata_dict) == host.applicable_errata_count + ), 'Length of errata instances list or api result differs from expected applicable count.' + return instances + + +def errata_id_set(erratum_list): + """Return a set of unique errata id's, passing list of errata instances, or dictionary. + :raise: `AssertionError`: if errata_id could not be found from a list entry. + :return: set{string} + """ + result = set() + try: + # erratum_list is a list of errata instances + result = set(e.errata_id for e in erratum_list) + except Exception: + try: + # erratum_list is a list of errata dictionary references + result = set(e['errata_id'] for e in erratum_list) + except Exception as err: + # some errata_id cannot be extracted from an entry in erratum_list + raise AssertionError( + 'Must take a dictionary ref or list of erratum instances, each entry needs attribute or key "errata_id".' + f' An entry in the given erratum_list had no discernible "errata_id". Errata(s): {erratum_list}.' + ) from err + return result + + +def package_applicability_changed_as_expected( + sat, + host, + package_filename, + prior_applicable_errata_list, + prior_applicable_errata_count, + prior_applicable_package_count, + return_applicables=False, +): + """Checks that after installing some package, updated any impacted errata(s) + status and host applicability count, and changed applicable package count by one. + + That one of the following occured: + - A non-applicable package was modified, or the same prior version was installed, + the amount of applicable errata and applicable packages remains the same. + Return False, as no applicability changes occured. + + - An Outdated applicable package was installed. Errata applicability increased + by the number of found applicable errata containing that package, + if the errata were not already applicable prior to install. + The number of applicable packages increased by one. + + - An Updated applicable package was installed. Errata applicability decreased + by the amount of found errata containing that package, if the errata are + no longer applicable, but they were prior to install, if any. + The number of applicable packages decreased by one. + + :param string: package_filename: + the full filename of the package version installed. + :param list: prior_applicable_errata_list: + list of all erratum instances from search, that were applicable before modifying package. + :param int prior_applicable_errata_count: + number of total applicable errata prior to modifying package. + :param int prior_applicable_package_count: + number of total applicable packages prior to modifying package. + :param boolean return_applicables (False): if set to True, and method's 'result' is not False: + return a dict containing result, and relevant package and errata information. + + :raise: `AssertionError` if: + Expected changes are not found. 
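# Editor's sketch, not part of the changeset: an equivalent, flatter take on the
# errata_id_set helper introduced above. Entries coming back from the API may be
# nailgun Errata instances (attribute access) or plain dicts (key access); probing with
# getattr/.get avoids the nested try/except while preserving the AssertionError on
# entries that carry no errata_id. Purely illustrative.
def errata_ids(erratum_list):
    ids = set()
    for entry in erratum_list:
        errata_id = getattr(entry, 'errata_id', None)
        if errata_id is None and isinstance(entry, dict):
            errata_id = entry.get('errata_id')
        if errata_id is None:
            raise AssertionError(f'No "errata_id" found on entry: {entry!r}')
        ids.add(errata_id)
    return ids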
+ Changes are made to unexpected errata or packages. + A non-readable prior list of erratum was passed. + :return: result(boolean), or relevant applicables(dict) + False if found that no applicable package was modified. + True if method finished executing, expected changes were found. + + :return_applicables: if True: return dict of relevant applicable and changed entities: + result boolean: True, method finished executing + errata_count int: current applicable errata count + package_count int: current applicable package count + current_package string: current version filename of package + prior_package string: previous version filename of package + change_in_errata int: positive, negative, or zero + changed_errata list[string]: of modified errata_ids + """ + assert ( + len(prior_applicable_errata_list) == prior_applicable_errata_count + ), 'Length of "prior_applicable_errata_list" passed, must equal "prior_applicable_errata_count" passed.' + if len(prior_applicable_errata_list) != 0: + try: + prior_applicable_errata_list[0].read() + except Exception as err: + raise AssertionError( + 'Exception on read of index zero in passed parameter "prior_applicable_errata_list".' + ' Must pass a list of readable erratum instances, or empty list.' + ) from err + # schedule errata applicability recalculate for most current status + task = None + epoch_timestamp = int(time() - 1) + result = host.execute('subscription-manager repos') + assert ( + result.status == 0 + ), f'Command "subscription-manager repos" failed to execute on host: {host.hostname},\n{result}' + + try: + task = sat.api_factory.wait_for_errata_applicability_task( + host_id=host.nailgun_host.id, + from_when=epoch_timestamp, + ) + except AssertionError: + # No task for forced applicability regenerate, + # applicability was already up to date + assert task is None + package_basename = str(package_filename.split("-", 1)[0]) # 'package-4.0-1.rpm' > 'package' + prior_unique_errata_ids = errata_id_set(prior_applicable_errata_list) + current_applicable_errata = _fetch_available_errata_instances(sat, host) + app_unique_errata_ids = errata_id_set(current_applicable_errata) + app_errata_with_package_diff = [] + app_errata_diff_ids = set() + + if prior_applicable_errata_count == host.applicable_errata_count: + # Applicable errata count had no change. + # we expect applicable errata id(s) from search also did not change. + assert ( + prior_unique_errata_ids == app_unique_errata_ids + ), 'Expected list of applicable erratum to remain the same.' + if prior_applicable_package_count == host.applicable_package_count: + # no applicable packages were modified + return False + + if prior_applicable_errata_count != host.applicable_errata_count: + # Modifying package changed errata applicability. + # we expect one or more errata id(s) from search to be added or removed. + difference = abs(prior_applicable_errata_count - host.applicable_errata_count) + # Check list of errata id(s) from search matches expected difference + assert ( + len(app_unique_errata_ids) == prior_applicable_errata_count + difference + ), 'Length of applicable errata found by search, does not match applicability count difference.' 
+ # modifying package increased errata applicability count (outdated ver installed) + if prior_applicable_errata_count < host.applicable_errata_count: + # save the new errata(s) found, ones added since package modify + app_errata_with_package_diff = [ + errata + for errata in current_applicable_errata + if ( + any(package_basename in p for p in errata.packages) + and errata.errata_id not in prior_unique_errata_ids + ) + ] + # modifying package decreased errata applicability count (updated ver installed) + elif prior_applicable_errata_count > host.applicable_errata_count: + # save the old errata(s) found, ones removed since package modify + app_errata_with_package_diff = [ + errata + for errata in current_applicable_errata + if ( + not any(package_basename in p.filename for p in errata.packages) + and errata.errata_id in prior_unique_errata_ids + ) + ] + app_errata_diff_ids = errata_id_set(app_errata_with_package_diff) + assert len(app_errata_diff_ids) > 0, ( + f'Applicable errata count changed by {difference}, after modifying {package_filename},' + ' but could not find any affected errata(s) with packages list' + f' that contains a matching package_basename: {package_basename}.' ) + # Check that applicable_package_count changed, + # if not, an applicable package was not modified. + if prior_applicable_package_count == host.applicable_package_count: + # if applicable packages remains the same, errata should also be the same + assert prior_applicable_errata_count == host.applicable_errata_count + assert prior_unique_errata_ids == app_unique_errata_ids + # no applicable errata were impaced by package install + return False + # is current errata list different from one prior to package install ? + if app_unique_errata_ids != prior_unique_errata_ids: + difference = len(app_unique_errata_ids) - len(prior_unique_errata_ids) + # check diff in applicable counts, is equal to diff in length of errata search results. + assert prior_applicable_errata_count + difference == host.applicable_errata_count + + """ Check applicable_package count changed by one. + we expect applicable_errata_count increased/decrease, + only by number of 'new' or 'removed' applicable errata, if any. + """ + if app_errata_with_package_diff: + if host.applicable_errata_count > prior_applicable_errata_count: + """Current applicable errata count is higher than before install, + An outdated package is expected to have been installed. + Check applicable package count increased by one. + Check applicable errata count increased by number + of newly applicable errata. + """ + assert prior_applicable_package_count + 1 == host.applicable_package_count + expected_increase = 0 + if app_unique_errata_ids != prior_unique_errata_ids: + difference = len(app_unique_errata_ids) - prior_applicable_errata_count + assert prior_applicable_errata_count + difference == host.applicable_errata_count + expected_increase = len(app_errata_diff_ids) + assert prior_applicable_errata_count + expected_increase == host.applicable_errata_count + + elif host.applicable_errata_count < prior_applicable_errata_count: + """Current applicable errata count is lower than before install, + An updated package is expected to have been installed. + Check applicable package count decreased by one. + Check applicable errata count decreased by number of + prior applicable errata, that are no longer found. 
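# Editor's distillation, not part of the diff: the core invariant that
# package_applicability_changed_as_expected enforces above. After a package install,
# the change in the host's applicable_errata_count should equal the net change in the
# set of applicable errata ids returned by search. Pure-python sketch with made-up ids.
def applicability_delta(prior_ids, current_ids):
    added = current_ids - prior_ids      # errata that became applicable
    removed = prior_ids - current_ids    # errata no longer applicable
    return len(added) - len(removed)     # expected change in applicable_errata_count


prior = {'RHSA-2024:0001', 'RHBA-2024:0002'}
current = {'RHBA-2024:0002'}
assert applicability_delta(prior, current) == -1  # updating a package removed one erratum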
+ """ + if host.applicable_errata_count < prior_applicable_errata_count: + assert host.applicable_package_count == prior_applicable_package_count - 1 + expected_decrease = 0 + if app_unique_errata_ids != prior_unique_errata_ids: + difference = len(app_unique_errata_ids) - len(prior_applicable_errata_count) + assert prior_applicable_errata_count + difference == host.applicable_errata_count + expected_decrease = len(app_errata_diff_ids) + assert prior_applicable_errata_count - expected_decrease == host.applicable_errata_count + else: + # We found by search an errata that was added or removed compared to prior install, + # But we also found that applicable_errata_count had not changed. + raise AssertionError( + f'Found one or more different errata: {app_errata_diff_ids},' + ' from those present prior to install, but applicable count did not change,' + f' {host.applicable_errata_count} were found, but expected {host.applicable_errata_count + len(app_errata_diff_ids)}.' + ) + else: + # already checked that applicable package count changed, + # but found applicable erratum list should not change, + # check the errata count and list remained the same. + assert ( + host.applicable_errata_count == prior_applicable_errata_count + ), 'Expected current applicable errata count, to equal prior applicable errata count.' + assert ( + len(current_applicable_errata) == prior_applicable_errata_count + ), 'Expected current applicable errata list length, to equal to prior applicable count.' + assert prior_unique_errata_ids == app_unique_errata_ids, ( + f'Expected set of prior applicable errata_ids: {prior_unique_errata_ids},' + f' to be equivalent to set of current applicable errata_ids: {app_unique_errata_ids}.' + ) + if return_applicables is True: + change_in_errata = len(app_unique_errata_ids) - prior_applicable_errata_count + output = host.execute(f'rpm -q {package_basename}').stdout + current_package = output[:-1] + assert package_basename in current_package + if current_package == package_filename: # noqa: SIM108 + # we have already checked if applicable package count changed, + # in case the same version as prior was installed and present. + prior_package = None # package must not have been present before this modification + else: + prior_package = package_filename + return { + 'result': True, + 'errata_count': host.applicable_errata_count, + 'package_count': host.applicable_package_count, + 'current_package': current_package, + 'prior_package': prior_package, + 'change_in_errata': change_in_errata, + 'changed_errata': list(app_errata_diff_ids), + } + return True + + +def cv_publish_promote(sat, org, cv, lce=None, needs_publish=True): + """Publish & promote Content View Version with all content visible in org. + + :param lce: if None, default to 'Library', + pass a single instance of lce, or list of instances. 
+ :param bool needs_publish: if False, skip publish of a new version + :return dictionary: + 'content-view': instance of updated cv + 'content-view-version': instance of newest cv version + """ + # Default to 'Library' lce, if None passed + # Take a single instance of lce, or list of instances + lce_ids = 'Library' + if lce is not None: + lce_ids = [lce.id] if not isinstance(lce, list) else [_lce.id for _lce in lce] + + if needs_publish is True: + _publish_and_wait(sat, org, cv) + # Content-view must have at least one published version + cv = sat.api.ContentView(id=cv.id).read() + assert cv.version, f'No version(s) are published to the Content-View: {cv.id}' + # Find highest version id, will be the latest + cvv_id = max(cvv.id for cvv in cv.version) + # Promote to lifecycle-environment(s) + if lce_ids == 'Library': + library_lce = cv.environment[0].read() + sat.api.ContentViewVersion(id=cvv_id).promote( + data={'environment_ids': library_lce.id, 'force': 'True'} + ) + else: + sat.api.ContentViewVersion(id=cvv_id).promote(data={'environment_ids': lce_ids}) + _result = { + 'content-view': sat.api.ContentView(id=cv.id).read(), + 'content-view-version': sat.api.ContentViewVersion(id=cvv_id).read(), + } + assert all( + entry for entry in _result.values() + ), f'One or more necessary components are missing: {_result}' + return _result + + +def _publish_and_wait(sat, org, cv): + """Publish a new version of content-view to organization, wait for task(s) completion.""" + task_id = sat.api.ContentView(id=cv.id).publish({'id': cv.id, 'organization': org})['id'] + assert task_id, f'No task was invoked to publish the Content-View: {cv.id}.' + # Should take < 1 minute, check in 5s intervals + ( + sat.wait_for_tasks( + search_query=(f'label = Actions::Katello::ContentView::Publish and id = {task_id}'), + search_rate=5, + max_tries=12, + ), + ( + f'Failed to publish the Content-View: {cv.id}, in time.' + f'Task: {task_id} failed, or timed out (60s).' + ), + ) @pytest.mark.upgrade @pytest.mark.tier3 -@pytest.mark.rhel_ver_list([7, 8, 9]) +@pytest.mark.rhel_ver_match('[^6]') @pytest.mark.no_containers -def test_positive_install_in_hc(module_org, activation_key, custom_repo, target_sat, content_hosts): +@pytest.mark.e2e +def test_positive_install_in_hc( + module_sca_manifest_org, + activation_key, + module_cv, + module_lce, + custom_repo, + target_sat, + content_hosts, +): """Install errata in a host-collection :id: 6f0242df-6511-4c0f-95fc-3fa32c63a064 - :Setup: Errata synced on satellite server. + :Setup: + 1. Some Unregistered hosts. + 2. Errata synced on satellite server. + + :Steps: + 1. Setup custom repo for each client, publish & promote content-view. + 2. Register clients as content hosts, install one outdated custom package on each client. + 3. Create Host Collection from clients, install errata to clients by Host Collection. + 4. PUT /api/v2/hosts/bulk/update_content - :steps: PUT /api/v2/hosts/bulk/update_content + :expectedresults: + 1. package install invokes errata applicability recalculate + 2. errata is installed in the host-collection + 3. errata installation invokes applicability recalculate + 4. updated custom package is found on the contained hosts + + :CaseImportance: Medium - :expectedresults: errata is installed in the host-collection. 
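# Editor's note as a sketch, not part of the changeset: in _publish_and_wait above, the
# wait_for_tasks call and the failure message are wrapped in a bare tuple, so the
# message is never attached to any assertion (wait_for_tasks is expected to raise on
# its own if the publish task never appears). If a custom message is wanted, a shape
# like the one below is closer to that intent. It assumes wait_for_tasks returns the
# matched task list; treat that as an assumption, not a documented contract.
def wait_for_publish(sat, cv_id, task_id):
    tasks = sat.wait_for_tasks(
        search_query=f'label = Actions::Katello::ContentView::Publish and id = {task_id}',
        search_rate=5,
        max_tries=12,
    )
    assert tasks, (
        f'Failed to publish the Content-View: {cv_id} in time. '
        f'Task {task_id} failed or timed out (60s).'
    )
    return tasks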
:BZ: 1983043 """ + # custom_repo already in published a module_cv version + repo_id = custom_repo['repository-id'] + # just promote to lce, do not publish + cv_publish_promote( + target_sat, module_sca_manifest_org, module_cv, module_lce, needs_publish=False + ) + # Each client: create custom repo, register as content host to cv, install outdated package for client in content_hosts: - client.install_katello_ca(target_sat) - client.register_contenthost(module_org.label, activation_key.name) - assert client.subscribed + _repo = target_sat.api.Repository(id=repo_id).read() + client.create_custom_repos(**{f'{_repo.name}': _repo.url}) + result = client.register( + org=module_sca_manifest_org, + activation_keys=activation_key.name, + target=target_sat, + loc=None, + ) + assert ( + result.status == 0 + ), f'Failed to register the host - {client.hostname}: {result.stderr}' client.add_rex_key(satellite=target_sat) - host_ids = [client.nailgun_host.id for client in content_hosts] - _install_package( - module_org, - clients=content_hosts, - host_ids=host_ids, - package_name=constants.FAKE_1_CUSTOM_PACKAGE, - ) - host_collection = target_sat.api.HostCollection(organization=module_org).create() + assert client.subscribed + client.run(r'subscription-manager repos --enable \*') + # Remove custom package by name + client.run(f'yum remove -y {FAKE_2_CUSTOM_PACKAGE_NAME}') + # No applicable errata or packages to start + assert (pre_errata_count := client.applicable_errata_count) == 0 + assert (pre_package_count := client.applicable_package_count) == 0 + prior_app_errata = _fetch_available_errata_instances(target_sat, client, expected_amount=0) + # 1s margin of safety for rounding + epoch_timestamp = int(time() - 1) + # install outdated version + assert client.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 + target_sat.api_factory.wait_for_errata_applicability_task( + host_id=client.nailgun_host.id, + from_when=epoch_timestamp, + ) + assert client.run(f'rpm -q {FAKE_1_CUSTOM_PACKAGE}').status == 0 + # One errata now applicable on client + assert client.applicable_errata_count == 1 + # One package now has an applicable errata + assert client.applicable_package_count == 1 + # Fetch the new errata instance(s), expecting only one + _fetch_available_errata_instances(target_sat, client, expected_amount=1) + + """ Did installing outdated package, update applicability as expected? + * Call method package_applicability_changed_as_expected * + returns: False if no applicability change occured or expected (package not applicable). + True if applicability changes were expected and occured (package is applicable). + raises: `AssertionError` if any expected changes did not occur, or unexpected changes were found. + + Expected: that each outdated package install: updated one or more errata to applicable, + if those now applicable errata(s) were not already applicable to some package prior. + """ + passed_checks = package_applicability_changed_as_expected( + target_sat, + client, + FAKE_1_CUSTOM_PACKAGE, + prior_app_errata, + pre_errata_count, + pre_package_count, + ) + assert ( + passed_checks is True + ), f'The package: {FAKE_1_CUSTOM_PACKAGE}, was not applicable to any erratum present on host: {client.hostname}.' 
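Editor's note on the helper used above: this test promotes an already-published content-view version with needs_publish=False, while other tests in the patch publish a fresh version first. A minimal sketch of both call patterns of the cv_publish_promote helper defined earlier in this diff; sat, org, cv, and lce stand in for the usual fixtures and are assumptions for illustration, not values from the patch.

    # Illustrative sketch only, not part of the patch.
    def promote_existing_version(sat, org, cv, lce):
        # A version of `cv` was already published (e.g. by the custom_repo fixture),
        # so skip publishing and only promote the newest version to `lce`.
        return cv_publish_promote(sat, org, cv, lce, needs_publish=False)

    def publish_then_promote(sat, org, cv, lce):
        # Publish a fresh version, then promote it; needs_publish defaults to True.
        result = cv_publish_promote(sat, org, cv, lce)
        return result['content-view-version']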
+ # Setup host collection using client ids + host_collection = target_sat.api.HostCollection(organization=module_sca_manifest_org).create() host_ids = [client.nailgun_host.id for client in content_hosts] host_collection.host_ids = host_ids host_collection = host_collection.update(['host_ids']) + # Install erratum to host collection task_id = target_sat.api.JobInvocation().run( data={ 'feature': 'katello_errata_install', 'inputs': {'errata': str(CUSTOM_REPO_ERRATA_ID)}, 'targeting_type': 'static_query', 'search_query': f'host_collection_id = {host_collection.id}', - 'organization_id': module_org.id, + 'organization_id': module_sca_manifest_org.id, }, )['id'] - target_sat.wait_for_tasks( - search_query=(f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'), - search_rate=15, - max_tries=10, + ( + target_sat.wait_for_tasks( + search_query=(f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'), + search_rate=15, + max_tries=10, + ), + ( + f'Could not install erratum: {CUSTOM_REPO_ERRATA_ID}, to Host-Collection.' + f' Task: {task_id} failed, or timed out.' + ), ) for client in content_hosts: - result = client.run(f'rpm -q {constants.FAKE_2_CUSTOM_PACKAGE}') - assert result.status == 0 + # No applicable errata after install on all clients + assert ( + client.applicable_errata_count == 0 + ), f'A client in Host-Collection: {client.hostname}, had {client.applicable_errata_count} ' + 'applicable errata, expected 0.' + # Updated package is present on all clients + result = client.run(f'rpm -q {FAKE_2_CUSTOM_PACKAGE}') + assert result.status == 0, ( + f'The client in Host-Collection: {client.hostname},' + f' could not find the updated package: {FAKE_2_CUSTOM_PACKAGE}' + ) + # No applicable packages on client + assert client.applicable_package_count == 0, ( + f'A client in Host-Collection: {client.hostname}, had {client.applicable_package_count} ' + f'applicable package(s) after installing erratum: {CUSTOM_REPO_ERRATA_ID}, but expected 0.' + ) @pytest.mark.tier3 -@pytest.mark.rhel_ver_list([7, 8, 9]) +@pytest.mark.rhel_ver_match('[^6]') @pytest.mark.no_containers @pytest.mark.e2e def test_positive_install_multiple_in_host( - module_org, activation_key, custom_repo, rhel_contenthost, target_sat + target_sat, rhel_contenthost, module_org, activation_key, module_lce ): """For a host with multiple applicable errata install one and ensure - the rest of errata is still available + the rest of errata is still available, repeat for some list of errata. + After each package or errata install, check applicability updates + as expected. :id: 67b7e95b-9809-455a-a74e-f1815cc537fc + :setup: + 1. An Unregistered host. + 2. Errata synced on satellite server. + + :steps: + 1. Setup content for a content host (repos, cv, etc) + 2. Register vm as a content host + 3. Remove any impacted custom packages present + - no applicable errata to start + 4. Install outdated versions of the custom packages + - some expected applicable errata + 5. Install any applicable security errata + - errata applicability drops after each install + - applicable packages drops by amount updated + - impacted package(s) updated and found + + :expectedresults: + 1. Package installation succeeded, if the package makes a + new errata applicable; available errata counter + increased by one. + 2. Errata apply task succeeded, available errata + counter decreased by one; it is possible to schedule + another errata installation. + 3. Applicable package counter decreased by number + of updated packages. 
Updated package(s) found. + 4. Errata recalculate applicability task is invoked + automatically, after install command of applicable package, + and errata apply task. Task(s) found and finish successfully. + :customerscenario: true :BZ: 1469800, 1528275, 1983043, 1905560 - :expectedresults: errata installation task succeeded, available errata - counter decreased by one; it's possible to schedule another errata - installation - :CaseImportance: Medium :parametrized: yes + """ - rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(module_org.label, activation_key.name) + # Associate custom repos with org, lce, ak: + custom_repo_id = target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': settings.repos.yum_9.url, + 'organization-id': module_org.id, + 'lifecycle-environment-id': module_lce.id, + 'activationkey-id': activation_key.id, + } + )['repository-id'] + rhel_contenthost.register( + activation_keys=activation_key.name, + target=target_sat, + org=module_org, + loc=None, + ) assert rhel_contenthost.subscribed - host = rhel_contenthost.nailgun_host - for package in constants.FAKE_9_YUM_OUTDATED_PACKAGES: - _install_package( - module_org, clients=[rhel_contenthost], host_ids=[host.id], package_name=package + # 1s margin of safety for rounding + epoch_timestamp = int(time() - 1) + # Remove any packages errata could apply to, verify none are present on host + for package in FAKE_9_YUM_OUTDATED_PACKAGES: + pkg_name = str(package.split("-", 1)[0]) # 'bear-4.0-1.noarch' > 'bear' + result = rhel_contenthost.run(f'yum remove -y {pkg_name}') + assert rhel_contenthost.run(f'rpm -q {pkg_name}').status == 1 + + # Wait for any recalculate task(s), possibly invoked by yum remove, + # catch AssertionError raised if no task was generated + try: + target_sat.api_factory.wait_for_errata_applicability_task( + host_id=rhel_contenthost.nailgun_host.id, + from_when=epoch_timestamp, + ) + except AssertionError: + # Yum remove did not trigger any errata recalculate task, + # assert any YUM_9 packages were/are not present, then continue + present_packages = set( + [ + package.filename + for package in target_sat.api.Package(repository=custom_repo_id).search() + ] + ) + assert not set(FAKE_9_YUM_OUTDATED_PACKAGES).intersection(present_packages) + assert not set(FAKE_9_YUM_UPDATED_PACKAGES).intersection(present_packages) + + # No applicable errata to start + assert rhel_contenthost.applicable_errata_count == 0 + present_applicable_packages = [] + # Installing all YUM_9 outdated custom packages + for i in range(len(FAKE_9_YUM_OUTDATED_PACKAGES)): + # record params prior to install, for post-install checks + package_filename = FAKE_9_YUM_OUTDATED_PACKAGES[i] + FAKE_9_YUM_UPDATED_PACKAGES[i] + pre_errata_count = rhel_contenthost.applicable_errata_count + pre_package_count = rhel_contenthost.applicable_package_count + prior_app_errata = _fetch_available_errata_instances(target_sat, rhel_contenthost) + # 1s margin of safety for rounding + epoch_timestamp = int(time() - 1) + assert rhel_contenthost.run(f'yum install -y {package_filename}').status == 0 + # Wait for async errata recalculate task(s), invoked by yum install, + # searching back 1s prior to install. + target_sat.api_factory.wait_for_errata_applicability_task( + host_id=rhel_contenthost.nailgun_host.id, + from_when=epoch_timestamp, + ) + # outdated package found on host + assert rhel_contenthost.run(f'rpm -q {package_filename}').status == 0 + """ + Modifying the applicable package did all: + 1. 
changed package applicability count by one and only one. + 2. changed errata applicability count by number of affected errata, whose + applicability status changed after package was modified. + 3. changed lists of applicable packages and applicable errata accordingly. + - otherwise raise `AssertionError` in below method; + """ + passed_checks = package_applicability_changed_as_expected( + target_sat, + rhel_contenthost, + package_filename, + prior_app_errata, + pre_errata_count, + pre_package_count, + ) + # If passed_checks is False, this package was not applicable, continue to next. + if passed_checks is True: + present_applicable_packages.append(package_filename) + + # Some applicable errata(s) now expected for outdated packages + assert rhel_contenthost.applicable_errata_count > 0 + # Expected applicable package(s) now for the applicable errata + assert rhel_contenthost.applicable_package_count == len(present_applicable_packages) + post_app_errata = _fetch_available_errata_instances(target_sat, rhel_contenthost) + """Installing all YUM_9 security errata sequentially, if applicable. + after each install, applicable-errata-count should drop by one, + one or more of the erratum's listed packages should be updated. + """ + installed_errata = [] + updated_packages = [] + expected_errata_to_install = [ + errata.errata_id + for errata in post_app_errata + if errata.errata_id in FAKE_9_YUM_SECURITY_ERRATUM + ] + all_applicable_packages = set( + package for errata in post_app_errata for package in errata.packages + ) + security_packages_to_install = set() + for errata_id in FAKE_9_YUM_SECURITY_ERRATUM: + errata_instance = ( + target_sat.api.Errata().search(query={'search': f'errata_id="{errata_id}"'})[0].read() ) - applicable_errata_count = rhel_contenthost.applicable_errata_count - assert applicable_errata_count > 1 - rhel_contenthost.add_rex_key(satellite=target_sat) - for errata in settings.repos.yum_9.errata[1:4]: + present_packages_impacted_by_errata = [ + package + for package in errata_instance.packages + if package in FAKE_9_YUM_UPDATED_PACKAGES + ] + security_packages_to_install.update(present_packages_impacted_by_errata) + # Are expected security errata packages found in all applicable packages ? 
+ assert security_packages_to_install.issubset(all_applicable_packages) + # Try to install each ERRATUM in FAKE_9_YUM_SECURITY_ERRATUM list, + # Each time, check lists of applicable erratum and packages, and counts + for ERRATUM in FAKE_9_YUM_SECURITY_ERRATUM: + pre_errata_count = rhel_contenthost.applicable_errata_count + ERRATUM_instance = ( + target_sat.api.Errata().search(query={'search': f'errata_id="{ERRATUM}"'})[0].read() + ) + # Check each time before each install + applicable_errata = _fetch_available_errata_instances(target_sat, rhel_contenthost) + # If this ERRATUM is not applicable, continue to next + if (len(applicable_errata) == 0) or ( + ERRATUM not in [_errata.errata_id for _errata in applicable_errata] + ): + continue + assert pre_errata_count >= 1 + errata_packages = [] + pre_package_count = rhel_contenthost.applicable_package_count + # From search result, find this ERRATUM by erratum_id, + # save the relevant list of package(s) + for _errata in applicable_errata: + if _errata.errata_id == ERRATUM: + errata_packages = _errata.packages + assert len(errata_packages) >= 1 + epoch_timestamp = int(time() - 1) + # Install this ERRATUM to host, wait for REX task task_id = target_sat.api.JobInvocation().run( data={ 'feature': 'katello_errata_install', - 'inputs': {'errata': str(errata)}, + 'inputs': {'errata': str(ERRATUM)}, 'targeting_type': 'static_query', 'search_query': f'name = {rhel_contenthost.hostname}', 'organization_id': module_org.id, @@ -252,23 +827,102 @@ def test_positive_install_multiple_in_host( search_rate=20, max_tries=15, ) - applicable_errata_count -= 1 - assert rhel_contenthost.applicable_errata_count == applicable_errata_count + # Wait for async errata recalculate task(s), invoked by REX task + target_sat.api_factory.wait_for_errata_applicability_task( + host_id=rhel_contenthost.nailgun_host.id, + from_when=epoch_timestamp, + ) + # Host Applicable Errata count decreased by one + assert ( + rhel_contenthost.applicable_errata_count == pre_errata_count - 1 + ), f'Host applicable errata did not decrease by one, after installation of {ERRATUM}' + # Applying this ERRATUM updated one or more of the erratum's listed packages + found_updated_packages = [] + for package in errata_packages: + result = rhel_contenthost.run(f'rpm -q {package}') + if result.status == 0: + assert ( + package in FAKE_9_YUM_UPDATED_PACKAGES + ), f'An unexpected package: "{package}", was updated by this errata: {ERRATUM}.' + if package in ERRATUM_instance.packages: + found_updated_packages.append(package) + + assert len(found_updated_packages) > 0, ( + f'None of the expected errata.packages: {errata_packages}, were found on host: "{rhel_contenthost.hostname}",' + f' after installing the applicable errata: {ERRATUM}.' + ) + # Host Applicable Packages count dropped by number of packages updated + assert rhel_contenthost.applicable_package_count == pre_package_count - len( + found_updated_packages + ), ( + f'Host: "{rhel_contenthost.hostname}" applicable package count did not decrease by {len(found_updated_packages)},' + f' after errata: {ERRATUM} installed updated packages: {found_updated_packages}' + ) + installed_errata.append(ERRATUM) + updated_packages.extend(found_updated_packages) + + # In case no ERRATUM in list are applicable: + # Lack of any package or errata install will raise `AssertionError`. + assert ( + len(installed_errata) > 0 + ), f'No applicable errata were found or installed from list: {FAKE_9_YUM_SECURITY_ERRATUM}.' 
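As an aside on the loop above: each candidate erratum is looked up by its errata_id through the nailgun search API, and its package list is recorded before the remote-execution install. A condensed sketch of that lookup, with target_sat and errata_id as assumed inputs:

    # Sketch only; mirrors the search call used in the surrounding test.
    def erratum_packages(target_sat, errata_id):
        hits = target_sat.api.Errata().search(query={'search': f'errata_id="{errata_id}"'})
        if not hits:
            # Erratum is not present in any synced repository.
            return []
        # `packages` holds the package filenames the erratum would update.
        return hits[0].read().packages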
+ assert ( + len(updated_packages) > 0 + ), f'No applicable packages were found or installed from list: {FAKE_9_YUM_UPDATED_PACKAGES}.' + # Each expected erratum and packages installed only once + pkg_set = set(updated_packages) + errata_set = set(installed_errata) + assert len(pkg_set) == len( + updated_packages + ), f'Expect no repeat packages in install list: {updated_packages}.' + assert len(errata_set) == len( + installed_errata + ), f'Expected no repeat errata in install list: {installed_errata}.' + # Only the expected YUM_9 packages were installed + assert set(updated_packages).issubset(set(FAKE_9_YUM_UPDATED_PACKAGES)) + # Only the expected YUM_9 errata were updated + assert set(installed_errata).issubset(set(FAKE_9_YUM_SECURITY_ERRATUM)) + # Check number of installed errata id(s) matches expected + assert len(installed_errata) == len(expected_errata_to_install), ( + f'Expected to install {len(expected_errata_to_install)} errata from list: {FAKE_9_YUM_SECURITY_ERRATUM},' + f' but installed: {len(installed_errata)}.' + ) + # Check sets of installed errata id(s) strings, matches expected + assert set(installed_errata) == set( + expected_errata_to_install + ), 'Expected errata id(s) and installed errata id(s) are not the same.' + # Check number of updated package version filename(s) matches expected + assert len(updated_packages) == len(security_packages_to_install), ( + f'Expected to install {len(security_packages_to_install)} packages from list: {FAKE_9_YUM_UPDATED_PACKAGES},' + f' but installed {len(updated_packages)}.' + ) + # Check sets of installed package filename(s) strings, matches expected + assert ( + set(updated_packages) == set(security_packages_to_install) + ), 'Expected package version filename(s) and installed package version filenam(s) are not the same.' @pytest.mark.tier3 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_list(module_org, custom_repo, target_sat): - """View all errata specific to repository +def test_positive_list_sorted_filtered(custom_repo, target_sat): + """View, sort, and filter all errata specific to repository. :id: 1efceabf-9821-4804-bacf-2213ac0c7550 :Setup: Errata synced on satellite server. - :steps: Create two repositories each synced and containing errata + :Steps: + + 1. Create two repositories each synced and containing errata + 2. GET /katello/api/errata + + :expectedresults: + + 1. Check that the errata belonging to one repo is not + showing in the other. + 2. Check that the errata can be sorted by updated date, + issued date, and filtered by CVE. - :expectedresults: Check that the errata belonging to one repo is not - showing in the other. 
""" repo1 = target_sat.api.Repository(id=custom_repo['repository-id']).read() repo2 = target_sat.api.Repository( @@ -281,6 +935,7 @@ def test_positive_list(module_org, custom_repo, target_sat): repo2_errata_ids = [ errata['errata_id'] for errata in repo2.errata(data={'per_page': '1000'})['results'] ] + # Check errata are viewable, errata for one repo is not showing in the other assert len(repo1_errata_ids) == len(settings.repos.yum_9.errata) assert len(repo2_errata_ids) == len(settings.repos.yum_3.errata) assert CUSTOM_REPO_ERRATA_ID in repo1_errata_ids @@ -288,19 +943,7 @@ def test_positive_list(module_org, custom_repo, target_sat): assert settings.repos.yum_3.errata[5] in repo2_errata_ids assert settings.repos.yum_3.errata[5] not in repo1_errata_ids - -@pytest.mark.tier3 -def test_positive_list_updated(module_org, custom_repo, target_sat): - """View all errata in an Org sorted by Updated - - :id: 560d6584-70bd-4d1b-993a-cc7665a9e600 - - :Setup: Errata synced on satellite server. - - :steps: GET /katello/api/errata - - :expectedresults: Errata is filtered by Org and sorted by Updated date. - """ + # View all errata in Org sorted by Updated repo = target_sat.api.Repository(id=custom_repo['repository-id']).read() assert repo.sync()['result'] == 'success' erratum_list = target_sat.api.Errata(repository=repo).search( @@ -309,31 +952,17 @@ def test_positive_list_updated(module_org, custom_repo, target_sat): updated = [errata.updated for errata in erratum_list] assert updated == sorted(updated) - -@pytest.mark.tier3 -def test_positive_sorted_issue_date_and_filter_by_cve(module_org, custom_repo, target_sat): - """Sort by issued date and filter errata by CVE - - :id: a921d4c2-8d3d-4462-ba6c-fbd4b898a3f2 - - :Setup: Errata synced on satellite server. - - :steps: GET /katello/api/errata - - :expectedresults: Errata is sorted by issued date and filtered by CVE. - """ # Errata is sorted by issued date. erratum_list = target_sat.api.Errata(repository=custom_repo['repository-id']).search( query={'order': 'issued ASC', 'per_page': '1000'} ) issued = [errata.issued for errata in erratum_list] assert issued == sorted(issued) - # Errata is filtered by CVE erratum_list = target_sat.api.Errata(repository=custom_repo['repository-id']).search( query={'order': 'cve DESC', 'per_page': '1000'} ) - # Most of Errata don't have any CVEs. Removing empty CVEs from results + # Most Errata won't have any CVEs. Removing empty CVEs from results erratum_cves = [errata.cves for errata in erratum_list if errata.cves] # Verifying each errata have its CVEs sorted in DESC order for errata_cves in erratum_cves: @@ -342,66 +971,67 @@ def test_positive_sorted_issue_date_and_filter_by_cve(module_org, custom_repo, t @pytest.fixture(scope='module') -def setup_content_rhel6(module_entitlement_manifest_org, module_target_sat): - """Setup content fot rhel6 content host - Using `Red Hat Enterprise Virtualization Agents for RHEL 6 Server (RPMs)` - from manifest, SATTOOLS_REPO for host-tools and yum_9 repo as custom repo. 
- - :return: Activation Key, Organization, subscription list - """ - org = module_entitlement_manifest_org - rh_repo_id_rhva = module_target_sat.api_factory.enable_rhrepo_and_fetchid( - basearch='x86_64', - org_id=org.id, - product=constants.PRDS['rhel'], - repo=constants.REPOS['rhva6']['name'], - reposet=constants.REPOSET['rhva6'], - releasever=constants.DEFAULT_RELEASE_VERSION, - ) - rh_repo = module_target_sat.api.Repository(id=rh_repo_id_rhva).read() - rh_repo.sync() +def setup_content_rhel8( + module_sca_manifest_org, + rh_repo_module_manifest, + activation_key, + module_lce, + module_cv, + module_target_sat, + return_result=True, +): + """Setup content for rhel8 content host + Using RH SAT-TOOLS RHEL8 for sat-tools, and FAKE_YUM_9 as custom-repo. + Published to content-view and promoted to lifecycle-environment. - host_tools_product = module_target_sat.api.Product(organization=org).create() - host_tools_repo = module_target_sat.api.Repository( - product=host_tools_product, - ).create() - host_tools_repo.url = settings.repos.SATCLIENT_REPO.RHEL6 - host_tools_repo = host_tools_repo.update(['url']) - host_tools_repo.sync() + Raises `AssertionError` if one or more of the setup components read are empty. - custom_product = module_target_sat.api.Product(organization=org).create() - custom_repo = module_target_sat.api.Repository( - product=custom_product, - ).create() - custom_repo.url = CUSTOM_REPO_URL - custom_repo = custom_repo.update(['url']) + :return: if return_result is True: otherwise None + A dictionary (_result) with the satellite instances of activaton-key, organization, + content-view, lifecycle-environment, rh_repo, custom_repo. + """ + org = module_sca_manifest_org + # Setup Custom and RH repos + custom_repo_id = module_target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': CUSTOM_REPO_URL, + 'organization-id': org.id, + 'lifecycle-environment-id': module_lce.id, + 'activationkey-id': activation_key.id, + 'content-view-id': module_cv.id, + } + )['repository-id'] + custom_repo = module_target_sat.api.Repository(id=custom_repo_id).read() custom_repo.sync() - - lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() - - cv = module_target_sat.api.ContentView( - organization=org, - repository=[rh_repo_id_rhva, host_tools_repo.id, custom_repo.id], - ).create() - cv.publish() - cvv = cv.read().version[0].read() - cvv.promote(data={'environment_ids': lce.id, 'force': False}) - - ak = module_target_sat.api.ActivationKey( - content_view=cv, organization=org, environment=lce - ).create() - - sub_list = [DEFAULT_SUBSCRIPTION_NAME, host_tools_product.name, custom_product.name] - for sub_name in sub_list: - subscription = module_target_sat.api.Subscription(organization=org).search( - query={'search': f'name="{sub_name}"'} - )[0] - ak.add_subscriptions(data={'subscription_id': subscription.id}) - return ak, org, sub_list + # Sync and add RH repo + rh_repo = module_target_sat.api.Repository(id=rh_repo_module_manifest.id).read() + rh_repo.sync() + module_target_sat.cli.ContentView.add_repository( + {'id': module_cv.id, 'organization-id': org.id, 'repository-id': rh_repo.id} + ) + _cv = cv_publish_promote(module_target_sat, org, module_cv, module_lce) + module_cv = _cv['content-view'] + latest_cvv = _cv['content-view-version'] + + _result = { + 'activation-key': activation_key.read(), + 'organization': org.read(), + 'content-view': module_cv.read(), + 'content-view-version': latest_cvv.read(), + 'lifecycle-environment': module_lce.read(), + 'rh_repo': 
rh_repo.read(), + 'custom_repo': custom_repo.read(), + } + assert all( + entry for entry in _result.values() + ), f'One or more necessary components are not present: {_result}' + return _result if return_result else None -@pytest.mark.tier3 -def test_positive_get_count_for_host(setup_content_rhel6, rhel6_contenthost, target_sat): +@pytest.mark.tier2 +def test_positive_get_count_for_host( + setup_content_rhel8, activation_key, rhel8_contenthost, module_target_sat +): """Available errata count when retrieving Host :id: 2f35933f-8026-414e-8f75-7f4ec048faae @@ -409,49 +1039,82 @@ def test_positive_get_count_for_host(setup_content_rhel6, rhel6_contenthost, tar :Setup: 1. Errata synced on satellite server. - 2. Some Content hosts present. + 2. Some client host present. + 3. Some rh repo and custom repo, added to content-view. + + :Steps: - :steps: GET /api/v2/hosts + 1. Register content host + 2. Install some outdated packages + 3. GET /api/v2/hosts + + :expectedresults: The applicable errata count is retrieved. - :expectedresults: The available errata count is retrieved. :parametrized: yes :CaseImportance: Medium """ - ak_name = setup_content_rhel6[0].name - org_label = setup_content_rhel6[1].label - org_id = setup_content_rhel6[1].id - sub_list = setup_content_rhel6[2] - rhel6_contenthost.install_katello_ca(target_sat) - rhel6_contenthost.register_contenthost(org_label, ak_name) - assert rhel6_contenthost.subscribed - pool_id = rhel6_contenthost.subscription_manager_get_pool(sub_list=sub_list) - pool_list = [pool_id[0][0]] - rhel6_contenthost.subscription_manager_attach_pool(pool_list=pool_list) - rhel6_contenthost.install_katello_host_tools() - rhel6_contenthost.enable_repo(constants.REPOS['rhva6']['id']) - host = rhel6_contenthost.nailgun_host + org = setup_content_rhel8['organization'] + custom_repo = setup_content_rhel8['rh_repo'] + rhel8_contenthost.create_custom_repos(**{f'{custom_repo.name}': custom_repo.url}) + result = rhel8_contenthost.register( + org=org, + activation_keys=activation_key.name, + target=module_target_sat, + loc=None, + ) + assert ( + result.status == 0 + ), f'Failed to register the host - {rhel8_contenthost.hostname}: {result.stderr}' + assert rhel8_contenthost.subscribed + rhel8_contenthost.execute(r'subscription-manager repos --enable \*') + host = rhel8_contenthost.nailgun_host.read() + # No applicable errata to start + assert rhel8_contenthost.applicable_errata_count == 0 for errata in ('security', 'bugfix', 'enhancement'): - _validate_errata_counts(org_id, host, errata_type=errata, expected_value=0) - rhel6_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}') - _validate_errata_counts(org_id, host, errata_type='security', expected_value=1) - rhel6_contenthost.run(f'yum install -y {constants.REAL_0_RH_PACKAGE}') - _validate_errata_counts(org_id, host, errata_type='bugfix', expected_value=2) + _validate_errata_counts(host, errata_type=errata, expected_value=0) + # One bugfix errata after installing outdated Kangaroo + result = rhel8_contenthost.execute(f'yum install -y {FAKE_9_YUM_OUTDATED_PACKAGES[7]}') + assert result.status == 0, f'Failed to install package {FAKE_9_YUM_OUTDATED_PACKAGES[7]}' + _validate_errata_counts(host, errata_type='bugfix', expected_value=1) + # One enhancement errata after installing outdated Gorilla + result = rhel8_contenthost.execute(f'yum install -y {FAKE_9_YUM_OUTDATED_PACKAGES[3]}') + assert result.status == 0, f'Failed to install package {FAKE_9_YUM_OUTDATED_PACKAGES[3]}' + _validate_errata_counts(host, 
errata_type='enhancement', expected_value=1) + # Install and check two outdated packages, with applicable security erratum + # custom_repo outdated Walrus + result = rhel8_contenthost.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') + assert result.status == 0, f'Failed to install package {FAKE_1_CUSTOM_PACKAGE}' + _validate_errata_counts(host, errata_type='security', expected_value=1) + # rh_repo outdated Puppet-agent + result = rhel8_contenthost.execute(f'yum install -y {REAL_RHEL8_1_PACKAGE_FILENAME}') + assert result.status == 0, f'Failed to install package {REAL_RHEL8_1_PACKAGE_FILENAME}' + _validate_errata_counts(host, errata_type='security', expected_value=2) + # All avaliable errata present + assert rhel8_contenthost.applicable_errata_count == 4 @pytest.mark.upgrade @pytest.mark.tier3 -def test_positive_get_applicable_for_host(setup_content_rhel6, rhel6_contenthost, target_sat): +def test_positive_get_applicable_for_host( + setup_content_rhel8, activation_key, rhel8_contenthost, target_sat +): """Get applicable errata ids for a host :id: 51d44d51-eb3f-4ee4-a1df-869629d427ac :Setup: + 1. Errata synced on satellite server. - 2. Some Content hosts present. + 2. Some client hosts present. + 3. Some rh repo and custom repo, added to content-view. - :steps: GET /api/v2/hosts/:id/errata + :Steps: + + 1. Register vm as a content host + 2. Install some outdated packages + 3. GET /api/v2/hosts/:id/errata :expectedresults: The available errata is retrieved. @@ -459,30 +1122,44 @@ def test_positive_get_applicable_for_host(setup_content_rhel6, rhel6_contenthost :CaseImportance: Medium """ - ak_name = setup_content_rhel6[0].name - org_label = setup_content_rhel6[1].label - org_id = setup_content_rhel6[1].id - rhel6_contenthost.install_katello_ca(target_sat) - rhel6_contenthost.register_contenthost(org_label, ak_name) - assert rhel6_contenthost.subscribed - pool_id = rhel6_contenthost.subscription_manager_get_pool(sub_list=setup_content_rhel6[2]) - pool_list = [pool_id[0][0]] - rhel6_contenthost.subscription_manager_attach_pool(pool_list=pool_list) - rhel6_contenthost.install_katello_host_tools() - rhel6_contenthost.enable_repo(constants.REPOS['rhva6']['id']) - host = rhel6_contenthost.nailgun_host - erratum = _fetch_available_errata(org_id, host, expected_amount=0) + org = setup_content_rhel8['organization'] + custom_repo = setup_content_rhel8['rh_repo'] + + rhel8_contenthost.create_custom_repos(**{f'{custom_repo.name}': custom_repo.url}) + result = rhel8_contenthost.register( + activation_keys=activation_key.name, + target=target_sat, + org=org, + loc=None, + ) + assert ( + result.status == 0 + ), f'Failed to register the host - {rhel8_contenthost.hostname}: {result.stderr}' + assert rhel8_contenthost.subscribed + rhel8_contenthost.execute(r'subscription-manager repos --enable \*') + for errata in REPO_WITH_ERRATA['errata']: + # Remove custom package if present, old or new. 
+ package_name = errata['package_name'] + result = rhel8_contenthost.execute(f'yum erase -y {package_name}') + if result.status != 0: + pytest.fail(f'Failed to remove {package_name}: {result.stdout} {result.stderr}') + + rhel8_contenthost.execute('subscription-manager repos') + assert rhel8_contenthost.applicable_errata_count == 0 + host = rhel8_contenthost.nailgun_host.read() + # Check no applicable errata to start + erratum = _fetch_available_errata(host, expected_amount=0) assert len(erratum) == 0 - rhel6_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}') - erratum = _fetch_available_errata(org_id, host, 1) + # Install outdated applicable custom package + rhel8_contenthost.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') + erratum = _fetch_available_errata(host, 1) assert len(erratum) == 1 assert CUSTOM_REPO_ERRATA_ID in [errata['errata_id'] for errata in erratum] - rhel6_contenthost.run(f'yum install -y {constants.REAL_0_RH_PACKAGE}') - erratum = _fetch_available_errata(org_id, host, 3) - assert len(erratum) == 3 - assert {constants.REAL_1_ERRATA_ID, constants.REAL_2_ERRATA_ID}.issubset( - {errata['errata_id'] for errata in erratum} - ) + # Install outdated applicable real package (from RH repo) + rhel8_contenthost.run(f'yum install -y {REAL_RHEL8_1_PACKAGE_FILENAME}') + erratum = _fetch_available_errata(host, 2) + assert len(erratum) == 2 + assert REAL_RHEL8_1_ERRATA_ID in [errata['errata_id'] for errata in erratum] @pytest.mark.tier3 @@ -497,15 +1174,17 @@ def test_positive_get_diff_for_cv_envs(target_sat): 1. Errata synced on satellite server. 2. Multiple environments present. - :steps: GET /katello/api/compare + :Steps: GET /katello/api/compare :expectedresults: Difference in errata between a set of environments for a content view is retrieved. 
+ """ org = target_sat.api.Organization().create() env = target_sat.api.LifecycleEnvironment(organization=org).create() content_view = target_sat.api.ContentView(organization=org).create() activation_key = target_sat.api.ActivationKey(environment=env, organization=org).create() + # Published content-view-version with repos will be created for repo_url in [settings.repos.yum_9.url, CUSTOM_REPO_URL]: target_sat.cli_factory.setup_org_for_a_custom_repo( { @@ -517,32 +1196,34 @@ def test_positive_get_diff_for_cv_envs(target_sat): } ) new_env = target_sat.api.LifecycleEnvironment(organization=org, prior=env).create() - cvvs = content_view.read().version[-2:] - cvvs[-1].promote(data={'environment_ids': new_env.id, 'force': False}) + # no need to publish a new version, just promote newest + cv_publish_promote( + sat=target_sat, org=org, cv=content_view, lce=[env, new_env], needs_publish=False + ) + content_view = target_sat.api.ContentView(id=content_view.id).read() + # Get last two versions by id to compare + cvv_ids = sorted(cvv.id for cvv in content_view.version)[-2:] result = target_sat.api.Errata().compare( - data={'content_view_version_ids': [cvv.id for cvv in cvvs], 'per_page': '9999'} + data={'content_view_version_ids': [cvv_id for cvv_id in cvv_ids], 'per_page': '9999'} ) cvv2_only_errata = next( errata for errata in result['results'] if errata['errata_id'] == CUSTOM_REPO_ERRATA_ID ) - assert cvvs[-1].id in cvv2_only_errata['comparison'] + assert cvv_ids[-1] in cvv2_only_errata['comparison'] both_cvvs_errata = next( - errata - for errata in result['results'] - if errata['errata_id'] in constants.FAKE_9_YUM_SECURITY_ERRATUM + errata for errata in result['results'] if errata['errata_id'] in FAKE_9_YUM_SECURITY_ERRATUM ) - assert {cvv.id for cvv in cvvs} == set(both_cvvs_errata['comparison']) + assert {cvv_id for cvv_id in cvv_ids} == set(both_cvvs_errata['comparison']) @pytest.mark.tier3 def test_positive_incremental_update_required( - module_org, + module_sca_manifest_org, module_lce, activation_key, module_cv, - custom_repo, - rh_repo, - rhel7_contenthost, + rh_repo_module_manifest, + rhel8_contenthost, target_sat, ): """Given a set of hosts and errata, check for content view version @@ -553,7 +1234,7 @@ def test_positive_incremental_update_required( :Setup: 1. Errata synced on satellite server - :steps: + :Steps: 1. Create VM as Content Host, registering to CV with custom errata 2. Install package in VM so it needs one erratum 3. 
Check if incremental_updates required: @@ -573,27 +1254,38 @@ def test_positive_incremental_update_required( :BZ: 2013093 """ - rhel7_contenthost.install_katello_ca(target_sat) - rhel7_contenthost.register_contenthost(module_org.label, activation_key.name) - assert rhel7_contenthost.subscribed - rhel7_contenthost.enable_repo(constants.REPOS['rhst7']['id']) - rhel7_contenthost.install_katello_agent() - host = rhel7_contenthost.nailgun_host - # install package to create demand for an Erratum - _install_package( - module_org, - [rhel7_contenthost], - [host.id], - constants.FAKE_1_CUSTOM_PACKAGE, - via_ssh=True, - rpm_package_name=constants.FAKE_1_CUSTOM_PACKAGE, + org = module_sca_manifest_org + rh_repo = target_sat.api.Repository( + id=rh_repo_module_manifest.id, + ).read() + rh_repo.sync() + # Add RH repo to content-view + target_sat.cli.ContentView.add_repository( + {'id': module_cv.id, 'organization-id': org.id, 'repository-id': rh_repo.id} ) + module_cv = target_sat.api.ContentView(id=module_cv.id).read() + _cv = cv_publish_promote(target_sat, org, module_cv, module_lce) + module_cv = _cv['content-view'] + + result = rhel8_contenthost.register( + org=org, + activation_keys=activation_key.name, + target=target_sat, + loc=None, + ) + assert result.status == 0, f'Failed to register the host: {rhel8_contenthost.hostname}' + assert rhel8_contenthost.subscribed + rhel8_contenthost.execute(r'subscription-manager repos --enable \*') + host = rhel8_contenthost.nailgun_host.read() + # install package to create demand for an Erratum + result = rhel8_contenthost.run(f'yum install -y {REAL_RHEL8_1_PACKAGE_FILENAME}') + assert result.status == 0, f'Failed to install package: {REAL_RHEL8_1_PACKAGE_FILENAME}' # Call nailgun to make the API POST to see if any incremental updates are required response = target_sat.api.Host().bulk_available_incremental_updates( data={ - 'organization_id': module_org.id, + 'organization_id': org.id, 'included': {'ids': [host.id]}, - 'errata_ids': [settings.repos.yum_6.errata[2]], + 'errata_ids': [REAL_RHEL8_1_ERRATA_ID], }, ) assert not response, 'Incremental update should not be required at this point' @@ -602,25 +1294,22 @@ def test_positive_incremental_update_required( target_sat.api.RPMContentViewFilter( content_view=module_cv, inclusion=True, name='Include Nothing' ).create() - module_cv.publish() - module_cv = module_cv.read() - CV1V = module_cv.version[-1].read() - # Must promote a CV version into a new Environment before we can add errata - CV1V.promote(data={'environment_ids': module_lce.id, 'force': False}) - module_cv = module_cv.read() + module_cv = target_sat.api.ContentView(id=module_cv.id).read() + module_cv = cv_publish_promote(target_sat, org, module_cv, module_lce)['content-view'] + rhel8_contenthost.execute('subscription-manager repos') # Call nailgun to make the API POST to ensure an incremental update is required response = target_sat.api.Host().bulk_available_incremental_updates( data={ - 'organization_id': module_org.id, + 'organization_id': org.id, 'included': {'ids': [host.id]}, - 'errata_ids': [settings.repos.yum_6.errata[2]], + 'errata_ids': [REAL_RHEL8_1_ERRATA_ID], }, ) - assert 'next_version' in response[0], 'Incremental update should be suggested' - 'at this point' + assert response, 'Nailgun response for host(s) with avaliable incremental update was None' + assert 'next_version' in response[0], 'Incremental update should be suggested at this point' -def _run_remote_command_on_content_host(module_org, command, vm, return_result=False): +def 
_run_remote_command_on_content_host(command, vm, return_result=False): result = vm.run(command) assert result.status == 0 if return_result: @@ -628,268 +1317,183 @@ def _run_remote_command_on_content_host(module_org, command, vm, return_result=F return None -def _set_prerequisites_for_swid_repos(module_org, vm): +def _set_prerequisites_for_swid_repos(vm): _run_remote_command_on_content_host( - module_org, f'curl --insecure --remote-name {settings.repos.swid_tools_repo}', vm + f'curl --insecure --remote-name {settings.repos.swid_tools_repo}', vm ) - _run_remote_command_on_content_host(module_org, "mv *swid*.repo /etc/yum.repos.d", vm) - _run_remote_command_on_content_host(module_org, "yum install -y swid-tools", vm) - _run_remote_command_on_content_host(module_org, "dnf install -y dnf-plugin-swidtags", vm) + _run_remote_command_on_content_host('mv *swid*.repo /etc/yum.repos.d', vm) + _run_remote_command_on_content_host('yum install -y swid-tools', vm) + _run_remote_command_on_content_host('yum install -y dnf-plugin-swidtags', vm) -def _validate_swid_tags_installed(module_org, vm, module_name): +def _validate_swid_tags_installed(vm, module_name): result = _run_remote_command_on_content_host( - module_org, f"swidq -i -n {module_name} | grep 'Name'", vm, return_result=True + f"swidq -i -n {module_name} | grep 'Name'", vm, return_result=True ) assert module_name in result +@pytest.fixture +def errata_host_lce(module_sca_manifest_org, target_sat): + """Create and return a new lce in module SCA org.""" + return target_sat.api.LifecycleEnvironment(organization=module_sca_manifest_org).create() + + @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.pit_client -@pytest.mark.parametrize( - 'module_repos_collection_with_manifest', - [{'YumRepository': {'url': settings.repos.swid_tag.url, 'distro': 'rhel8'}}], - indirect=True, -) @pytest.mark.no_containers +@pytest.mark.rhel_ver_match('8') def test_errata_installation_with_swidtags( - module_org, module_lce, module_repos_collection_with_manifest, rhel8_contenthost, target_sat + module_sca_manifest_org, + rhel_contenthost, + errata_host_lce, + target_sat, ): """Verify errata installation with swid_tags and swid tags get updated after module stream update. :id: 43a59b9a-eb9b-4174-8b8e-73d923b1e51e + :setup: + + 1. rhel8 contenthost checked out, using org with simple content access. + 2. create satellite repositories having rhel8 baseOS, prereqs, custom content w/ swid tags. + 3. associate repositories to org, lifecycle environment, and cv. Sync all content. + 4. publish & promote to environment, content view version with all content. + 5. create activation key, for registering host to cv. + :steps: - 1. create product and repository having swid tags - 2. create content view and published it with repository - 3. create activation key and register content host - 4. create rhel8, swid repos on content host - 5. install swid-tools, dnf-plugin-swidtags packages on content host - 6. install older module stream and generate errata, swid tag - 7. assert errata count, swid tags are generated - 8. install errata vis updating module stream - 9. assert errata count and swid tag after module update + 1. register host using cv's activation key, assert succeeded. + 2. install swid-tools, dnf-plugin-swidtags packages on content host. + 3. install older module stream and generate errata, swid tag. + 4. assert errata count, swid tags are generated. + 5. install errata via updating module stream. + 6. assert errata count and swid tag changed after module update. 
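A note on the setup items listed above: the base OS, prerequisite, and swid-tag repositories are all attached to one organization, lifecycle environment, and content view before a single publish and promote. A hedged sketch of that pattern, reusing setup_org_for_a_custom_repo and cv_publish_promote from this patch; target_sat, org, lce, cv, and repo_urls are assumed inputs, not values from the patch.

    # Illustrative sketch, not part of the patch.
    def attach_repos_and_promote(target_sat, org, lce, cv, repo_urls):
        for url in repo_urls:
            target_sat.cli_factory.setup_org_for_a_custom_repo(
                {
                    'url': url,
                    'organization-id': org.id,
                    'lifecycle-environment-id': lce.id,
                    'content-view-id': cv.id,
                }
            )
        # One publish/promote at the end picks up everything attached above.
        return cv_publish_promote(sat=target_sat, org=org, cv=cv, lce=lce)['content-view']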
- :expectedresults: swid tags should get updated after errata installation via - module stream update + :expectedresults: + swid tags should get updated after errata installation via module stream update :CaseAutomation: Automated :parametrized: yes :CaseImportance: Critical + """ module_name = 'kangaroo' version = '20180704111719' - # setup rhel8 and sat_tools_repos - rhel8_contenthost.create_custom_repos( - **{ - 'baseos': settings.repos.rhel8_os.baseos, - 'appstream': settings.repos.rhel8_os.appstream, - } - ) - module_repos_collection_with_manifest.setup_virtual_machine( - rhel8_contenthost, install_katello_agent=False - ) + org = module_sca_manifest_org + lce = errata_host_lce + cv = target_sat.api.ContentView( + organization=org, + environment=[lce], + ).create() - # install older module stream - rhel8_contenthost.add_rex_key(satellite=target_sat) - _set_prerequisites_for_swid_repos(module_org, vm=rhel8_contenthost) - _run_remote_command_on_content_host( - module_org, f'dnf -y module install {module_name}:0:{version}', rhel8_contenthost + repos = { + 'base_os': settings.repos.rhel8_os.baseos, # base rhel8 + 'sat_tools': settings.repos.rhel8_os.appstream, # swid prereqs + 'swid_tags': settings.repos.swid_tag.url, # module stream pkgs and errata + } + # associate repos with sat, org, lce, cv, and sync + for r in repos: + target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': repos[r], + 'organization-id': org.id, + 'lifecycle-environment-id': lce.id, + 'content-view-id': cv.id, + }, + ) + # promote newest cv version with all repos/content + cv = cv_publish_promote( + sat=target_sat, + org=org, + cv=cv, + lce=lce, + )['content-view'] + # ak in env, tied to content-view + ak = target_sat.api.ActivationKey( + organization=org, + environment=lce, + content_view=cv, + ).create() + # register host with ak, succeeds + result = rhel_contenthost.register( + activation_keys=ak.name, + target=target_sat, + org=org, + loc=None, ) - target_sat.cli.Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) + assert result.status == 0, f'Failed to register the host {target_sat.hostname},\n{result}' + assert ( + rhel_contenthost.subscribed + ), f'Failed to subscribe the host {target_sat.hostname}, to content.' + result = rhel_contenthost.execute(r'subscription-manager repos --enable \*') + assert result.status == 0, f'Failed to enable repositories with subscription-manager,\n{result}' + + # install outdated module stream package + _set_prerequisites_for_swid_repos(rhel_contenthost) + result = rhel_contenthost.execute(f'dnf -y module install {module_name}:0:{version}') + assert ( + result.status == 0 + ), f'Failed to install module stream package: {module_name}:0:{version}.\n{result.stdout}' + # recalculate errata after install of old module stream + rhel_contenthost.execute('subscription-manager repos') + # validate swid tags Installed - before_errata_apply_result = _run_remote_command_on_content_host( - module_org, - f"swidq -i -n {module_name} | grep 'File' | grep -o 'rpm-.*.swidtag'", - rhel8_contenthost, - return_result=True, + result = rhel_contenthost.execute( + f'swidq -i -n {module_name} | grep "File" | grep -o "rpm-.*.swidtag"', + ) + assert ( + result.status == 0 + ), f'An error occured trying to fetch swid tags for {module_name}.\n{result}' + before_errata_apply_result = result.stdout + assert before_errata_apply_result != '', f'Found no swid tags contained in {module_name}.' 
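The before/after comparison above runs the same swidq pipeline twice; a small helper sketch that captures the swidtag filenames as a set makes the intent explicit. Here host is assumed to be a content-host wrapper exposing execute(), as used throughout these tests.

    # Sketch only; the swidq pipeline is the one used in the test above.
    def swidtags_for(host, module_name):
        result = host.execute(
            f'swidq -i -n {module_name} | grep "File" | grep -o "rpm-.*.swidtag"'
        )
        assert result.status == 0, f'swidq query failed for {module_name}:\n{result.stderr}'
        # One swidtag filename per line; the set changes when the module stream is updated.
        return set(result.stdout.split())

With such a helper, the final check of the test reduces to asserting that the set returned before the module update differs from the set returned after it.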
+ assert (app_errata_count := rhel_contenthost.applicable_errata_count) == 1, ( + f'Found {rhel_contenthost.applicable_errata_count} applicable errata,' + f' after installing {module_name}:0:{version}, expected 1.' ) - assert before_errata_apply_result != '' - applicable_errata_count = rhel8_contenthost.applicable_errata_count - assert applicable_errata_count == 1 # apply modular errata - _run_remote_command_on_content_host( - module_org, f'dnf -y module update {module_name}', rhel8_contenthost + result = rhel_contenthost.execute(f'dnf -y module update {module_name}') + assert ( + result.status == 0 + ), f'Failed to update module stream package: {module_name}.\n{result.stdout}' + assert rhel_contenthost.execute('dnf -y upload-profile').status == 0 + + # recalculate and check errata after modular update + rhel_contenthost.execute('subscription-manager repos') + app_errata_count -= 1 + assert rhel_contenthost.applicable_errata_count == app_errata_count, ( + f'Found {rhel_contenthost.applicable_errata_count} applicable errata, after modular update of {module_name},' + f' expected {app_errata_count}.' ) - _run_remote_command_on_content_host(module_org, 'dnf -y upload-profile', rhel8_contenthost) - target_sat.cli.Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) - applicable_errata_count -= 1 - assert rhel8_contenthost.applicable_errata_count == applicable_errata_count - after_errata_apply_result = _run_remote_command_on_content_host( - module_org, - f"swidq -i -n {module_name} | grep 'File'| grep -o 'rpm-.*.swidtag'", - rhel8_contenthost, - return_result=True, + # swidtags were updated based on package version + result = rhel_contenthost.execute( + f'swidq -i -n {module_name} | grep "File" | grep -o "rpm-.*.swidtag"', ) - - # swidtags get updated based on package version + assert ( + result.status == 0 + ), f'An error occured trying to fetch swid tags for {module_name}.\n{result}' + after_errata_apply_result = result.stdout assert before_errata_apply_result != after_errata_apply_result -"""Section for tests using RHEL8 Content Host. - The applicability tests using Default Content View are related to the introduction of Pulp3. 
- """ - - @pytest.fixture(scope='module') -def rh_repo_module_manifest(module_entitlement_manifest_org, module_target_sat): +def rh_repo_module_manifest(module_sca_manifest_org, module_target_sat): """Use module manifest org, creates tools repo, syncs and returns RH repo.""" # enable rhel repo and return its ID rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( - basearch=constants.DEFAULT_ARCHITECTURE, - org_id=module_entitlement_manifest_org.id, - product=constants.PRDS['rhel8'], - repo=constants.REPOS['rhst8']['name'], - reposet=constants.REPOSET['rhst8'], + basearch=DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=PRDS['rhel8'], + repo=REPOS['rhst8']['name'], + reposet=REPOSET['rhst8'], releasever='None', ) # Sync step because repo is not synced by default rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo - - -@pytest.fixture(scope='module') -def rhel8_custom_repo_cv(module_entitlement_manifest_org, module_target_sat): - """Create repo and publish CV so that packages are in Library""" - return module_target_sat.cli_factory.setup_org_for_a_custom_repo( - { - 'url': settings.repos.module_stream_1.url, - 'organization-id': module_entitlement_manifest_org.id, - } - ) - - -@pytest.fixture(scope='module') -def rhel8_module_ak( - module_entitlement_manifest_org, - default_lce, - rh_repo_module_manifest, - rhel8_custom_repo_cv, - module_target_sat, -): - rhel8_module_ak = module_target_sat.api.ActivationKey( - content_view=module_entitlement_manifest_org.default_content_view, - environment=module_target_sat.api.LifecycleEnvironment( - id=module_entitlement_manifest_org.library.id - ), - organization=module_entitlement_manifest_org, - ).create() - # Ensure tools repo is enabled in the activation key - rhel8_module_ak.content_override( - data={ - 'content_overrides': [{'content_label': constants.REPOS['rhst8']['id'], 'value': '1'}] - } - ) - # Fetch available subscriptions - subs = module_target_sat.api.Subscription(organization=module_entitlement_manifest_org).search( - query={'search': f'{constants.DEFAULT_SUBSCRIPTION_NAME}'} - ) - assert subs - # Add default subscription to activation key - rhel8_module_ak.add_subscriptions(data={'subscription_id': subs[0].id}) - # Add custom subscription to activation key - product = module_target_sat.api.Product(organization=module_entitlement_manifest_org).search( - query={'search': 'redhat=false'} - ) - custom_sub = module_target_sat.api.Subscription( - organization=module_entitlement_manifest_org - ).search(query={'search': f'name={product[0].name}'}) - rhel8_module_ak.add_subscriptions(data={'subscription_id': custom_sub[0].id}) - return rhel8_module_ak - - -@pytest.mark.tier2 -def test_apply_modular_errata_using_default_content_view( - module_entitlement_manifest_org, - default_lce, - rhel8_contenthost, - rhel8_module_ak, - rhel8_custom_repo_cv, - target_sat, -): - """ - Registering a RHEL8 system to the default content view with no modules enabled results in - no modular errata or packages showing as applicable or installable - - Enabling a module on a RHEL8 system assigned to the default content view and installing an - older package should result in the modular errata and package showing as applicable and - installable - - :id: 030981dd-19ba-4f8b-9c24-0aee90aaa4c4 - - Steps: - 1. Register host with AK, install tools - 2. Assert no errata indicated - 3. Install older version of stream - 4. Assert errata is applicable - 5. Update module stream - 6. 
Assert errata is no longer applicable - - :expectedresults: Errata enumeration works with module streams when using default Content View - - :CaseAutomation: Automated - - :parametrized: yes - """ - module_name = 'duck' - stream = '0' - version = '20180704244205' - - rhel8_contenthost.install_katello_ca(target_sat) - rhel8_contenthost.register_contenthost( - module_entitlement_manifest_org.label, rhel8_module_ak.name - ) - assert rhel8_contenthost.subscribed - host = rhel8_contenthost.nailgun_host - host = host.read() - # Assert no errata on host, no packages applicable or installable - errata = _fetch_available_errata(module_entitlement_manifest_org, host, expected_amount=0) - assert len(errata) == 0 - rhel8_contenthost.install_katello_host_tools() - # Install older version of module stream to generate the errata - result = rhel8_contenthost.execute( - f'yum -y module install {module_name}:{stream}:{version}', - ) - assert result.status == 0 - # Check that there is now two errata applicable - errata = _fetch_available_errata(module_entitlement_manifest_org, host, 2) - target_sat.cli.Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) - assert len(errata) == 2 - # Assert that errata package is required - assert constants.FAKE_3_CUSTOM_PACKAGE in errata[0]['module_streams'][0]['packages'] - # Update module - result = rhel8_contenthost.execute( - f'yum -y module update {module_name}:{stream}:{version}', - ) - assert result.status == 0 - # Check that there is now no errata applicable - errata = _fetch_available_errata(module_entitlement_manifest_org, host, 0) - assert len(errata) == 0 - - @pytest.mark.tier2 - @pytest.mark.skip("Uses old large_errata repo from repos.fedorapeople") - def test_positive_sync_repos_with_large_errata(target_sat): - """Attempt to synchronize 2 repositories containing large (or lots of) - errata. - - :id: d6680b9f-4c88-40b4-8b96-3d170664cb28 - - :customerscenario: true - - :BZ: 1463811 - - :expectedresults: both repositories were successfully synchronized - """ - org = target_sat.api.Organization().create() - for _ in range(2): - product = target_sat.api.Product(organization=org).create() - repo = target_sat.api.Repository(product=product, url=settings.repos.yum_7.url).create() - response = repo.sync() - assert response, f"Repository {repo} failed to sync." 
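To summarize the modular-errata flow exercised in this part of the file (both in the removed default-content-view test and in the swid-tag test above): installing an outdated module stream makes its erratum applicable, and updating the stream consumes it again. A condensed, hedged sketch; host is a registered content host and module_spec a 'name:stream:version' string, both assumptions for illustration.

    # Editorial sketch, not part of the patch.
    def modular_errata_roundtrip(host, module_spec):
        module_name = module_spec.split(':')[0]
        # Installing the outdated stream should make its erratum applicable.
        assert host.execute(f'dnf -y module install {module_spec}').status == 0
        host.execute('subscription-manager repos')  # refresh applicability, as the tests above do
        before = host.applicable_errata_count
        assert before >= 1
        # Updating the stream applies the erratum; the applicable count drops.
        assert host.execute(f'dnf -y module update {module_name}').status == 0
        host.execute('subscription-manager repos')
        assert host.applicable_errata_count < before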
diff --git a/tests/foreman/api/test_foremantask.py b/tests/foreman/api/test_foremantask.py index c736a0e7161..0cab1253369 100644 --- a/tests/foreman/api/test_foremantask.py +++ b/tests/foreman/api/test_foremantask.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_host.py b/tests/foreman/api/test_host.py index 94730d56925..0982a161921 100644 --- a/tests/foreman/api/test_host.py +++ b/tests/foreman/api/test_host.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + import http from fauxfactory import gen_choice, gen_integer, gen_ipaddr, gen_mac, gen_string diff --git a/tests/foreman/api/test_hostcollection.py b/tests/foreman/api/test_hostcollection.py index 985f989718e..3675b7a40b0 100644 --- a/tests/foreman/api/test_hostcollection.py +++ b/tests/foreman/api/test_hostcollection.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import choice, randint from broker import Broker diff --git a/tests/foreman/api/test_hostgroup.py b/tests/foreman/api/test_hostgroup.py index e0e6fd9ed91..5313ec0400f 100644 --- a/tests/foreman/api/test_hostgroup.py +++ b/tests/foreman/api/test_hostgroup.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import randint from fauxfactory import gen_string diff --git a/tests/foreman/api/test_http_proxy.py b/tests/foreman/api/test_http_proxy.py index 51ceb485361..04d12a2df2b 100644 --- a/tests/foreman/api/test_http_proxy.py +++ b/tests/foreman/api/test_http_proxy.py @@ -11,6 +11,7 @@ :CaseAutomation: Automated """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/api/test_ldapauthsource.py b/tests/foreman/api/test_ldapauthsource.py index 885d290e741..09742a8ff8d 100644 --- a/tests/foreman/api/test_ldapauthsource.py +++ b/tests/foreman/api/test_ldapauthsource.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_lifecycleenvironment.py b/tests/foreman/api/test_lifecycleenvironment.py index b68017efb87..61ec1a69a02 100644 --- a/tests/foreman/api/test_lifecycleenvironment.py +++ b/tests/foreman/api/test_lifecycleenvironment.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_location.py b/tests/foreman/api/test_location.py index 2e54a4862fa..28c2d9c0b25 100644 --- a/tests/foreman/api/test_location.py +++ b/tests/foreman/api/test_location.py @@ -14,6 +14,7 @@ :CaseImportance: High """ + from random import randint from fauxfactory import gen_integer, gen_string diff --git a/tests/foreman/api/test_media.py b/tests/foreman/api/test_media.py index 79c5cb7f20e..f58f7982176 100644 --- a/tests/foreman/api/test_media.py +++ b/tests/foreman/api/test_media.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_string, gen_url @@ -38,7 +39,7 @@ def class_media(self, module_org, class_target_sat): ('name', 'new_name'), **parametrized( list(zip(valid_data_list().values(), valid_data_list().values(), strict=True)) - ) + ), ) def test_positive_crud_with_name(self, module_org, name, new_name, module_target_sat): """Create, update, delete media with valid name only diff --git a/tests/foreman/api/test_multiple_paths.py b/tests/foreman/api/test_multiple_paths.py index 8c683ca309a..b179e487fb7 100644 --- a/tests/foreman/api/test_multiple_paths.py +++ b/tests/foreman/api/test_multiple_paths.py @@ -11,6 +11,7 @@ 
:CaseImportance: High """ + import http from nailgun import client, entities, entity_fields diff --git a/tests/foreman/api/test_notifications.py b/tests/foreman/api/test_notifications.py index 7954ae9bd0d..5eeb0e60127 100644 --- a/tests/foreman/api/test_notifications.py +++ b/tests/foreman/api/test_notifications.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from mailbox import mbox from re import findall from tempfile import mkstemp @@ -21,7 +22,6 @@ from robottelo.config import settings from robottelo.constants import DEFAULT_LOC, DEFAULT_ORG -from robottelo.utils.issue_handlers import is_open @pytest.fixture @@ -44,6 +44,17 @@ def admin_user_with_localhost_email(target_sat): user.delete() +@pytest.fixture +def admin_user_with_custom_settings(request, admin_user_with_localhost_email): + """Admin user with custom properties set via parametrization. + `request.param` should be a dict-like value. + """ + for key, value in request.param.items(): + setattr(admin_user_with_localhost_email, key, value) + admin_user_with_localhost_email.update(list(request.param.keys())) + return admin_user_with_localhost_email + + @pytest.fixture def reschedule_long_running_tasks_notification(target_sat): """Reschedule long-running tasks checker from midnight (default) to every minute. @@ -54,8 +65,8 @@ def reschedule_long_running_tasks_notification(target_sat): assert ( target_sat.execute( - f"FOREMAN_TASKS_CHECK_LONG_RUNNING_TASKS_CRONLINE='{every_minute_cron_schedule}' " - "foreman-rake foreman_tasks:reschedule_long_running_tasks_checker" + "foreman-rake foreman_tasks:reschedule_long_running_tasks_checker " + f"FOREMAN_TASKS_CHECK_LONG_RUNNING_TASKS_CRONLINE='{every_minute_cron_schedule}'" ).status == 0 ) @@ -64,14 +75,14 @@ def reschedule_long_running_tasks_notification(target_sat): assert ( target_sat.execute( - f"FOREMAN_TASKS_CHECK_LONG_RUNNING_TASKS_CRONLINE='{default_cron_schedule}' " - "foreman-rake foreman_tasks:reschedule_long_running_tasks_checker" + "foreman-rake foreman_tasks:reschedule_long_running_tasks_checker " + f"FOREMAN_TASKS_CHECK_LONG_RUNNING_TASKS_CRONLINE='{default_cron_schedule}'" ).status == 0 ) -@pytest.fixture +@pytest.fixture(autouse=True) def start_postfix_service(target_sat): """Start postfix service (disabled by default).""" assert target_sat.execute('systemctl start postfix').status == 0 @@ -92,33 +103,61 @@ def clean_root_mailbox(target_sat): target_sat.execute(f'mv -f {root_mailbox_backup} {root_mailbox}') -@pytest.fixture -def wait_for_long_running_task_mail(target_sat, clean_root_mailbox, long_running_task): - """Wait until the long-running task ID is found in the Satellite's mbox file.""" - timeout = 300 +def wait_for_mail(sat_obj, mailbox_file, contains_string, timeout=300, delay=5): + """ + Wait until the desired string is found in the Satellite's mbox file. + """ try: wait_for( - func=target_sat.execute, - func_args=[f'grep --quiet {long_running_task["task"]["id"]} {clean_root_mailbox}'], - fail_condition=lambda res: res.status == 0, + func=sat_obj.execute, + func_args=[f"grep --quiet '{contains_string}' {mailbox_file}"], + fail_condition=lambda res: res.status != 0, timeout=timeout, - delay=5, + delay=delay, ) except TimedOutError as err: raise AssertionError( - f'No notification e-mail with long-running task ID {long_running_task["task"]["id"]} ' - f'has arrived to {clean_root_mailbox} after {timeout} seconds.' + f'No e-mail with text "{contains_string}" has arrived to mailbox {mailbox_file} ' + f'after {timeout} seconds.' 
) from err return True @pytest.fixture -def root_mailbox_copy(target_sat, clean_root_mailbox, wait_for_long_running_task_mail): +def wait_for_long_running_task_mail(target_sat, clean_root_mailbox, long_running_task): + """Wait until the long-running task ID is found in the Satellite's mbox file.""" + return wait_for_mail( + sat_obj=target_sat, + mailbox_file=clean_root_mailbox, + contains_string=long_running_task["task"]["id"], + ) + + +@pytest.fixture +def wait_for_no_long_running_task_mail(target_sat, clean_root_mailbox, long_running_task): + """Wait and check that no long-running task ID is found in the Satellite's mbox file.""" + timeout = 120 + try: + wait_for_mail( + sat_obj=target_sat, + mailbox_file=clean_root_mailbox, + contains_string=long_running_task["task"]["id"], + timeout=timeout, + ) + except AssertionError: + return True + raise AssertionError( + f'E-mail with long running task ID "{long_running_task["task"]["id"]}" ' + f'should not have arrived to mailbox {clean_root_mailbox}!' + ) + + +@pytest.fixture +def root_mailbox_copy(target_sat, clean_root_mailbox): """Parsed local system copy of the Satellite's root user mailbox. :returns: :class:`mailbox.mbox` instance """ - assert wait_for_long_running_task_mail result = target_sat.execute(f'cat {clean_root_mailbox}') assert result.status == 0, f'Could not read mailbox {clean_root_mailbox} on Satellite host.' mbox_content = result.stdout @@ -152,7 +191,7 @@ def long_running_task(target_sat): 'password': settings.server.ssh_password, }, ) - sql_date_2_days_ago = "now() - INTERVAL \'2 days\'" + sql_date_2_days_ago = "now() - INTERVAL \'2 days\'" # fmt: skip result = target_sat.execute( "su - postgres -c \"psql foreman postgres < 0 +def test_positive_sync_upstream_repo_with_zst_compression( + module_org, module_product, module_target_sat +): + """Sync upstream repo having zst compression and verify it succeeds. + + :id: 1eddff2a-b6b5-420b-a0e8-ba6a05c11ca4 + + :expectedresults: Repo sync is successful and no zst type compression errors are present in /var/log/messages. + + :steps: + + 1. Sync upstream repository having zst type compression. + 2. Assert that no errors related to compression type are present in + /var/log/messages. + 3. Assert that sync was executed properly. 
+ + :BZ: 2241934 + + :customerscenario: true + """ + repo = module_target_sat.api.Repository( + product=module_product, content_type='yum', url=FAKE_ZST_REPO + ).create() + assert repo.read().content_counts['rpm'] == 0 + sync = module_product.sync() + assert sync['result'] == 'success' + assert repo.read().content_counts['rpm'] > 0 + result = module_target_sat.execute( + 'grep pulp /var/log/messages | grep "Cannot detect compression type"' + ) + assert result.status == 1 + + @pytest.mark.tier1 def test_negative_upload_expired_manifest(module_org, target_sat): """Upload an expired manifest and attempt to refresh it diff --git a/tests/foreman/api/test_repository.py b/tests/foreman/api/test_repository.py index e73a4ae35e5..b5e381d9874 100644 --- a/tests/foreman/api/test_repository.py +++ b/tests/foreman/api/test_repository.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import re from string import punctuation import tempfile @@ -50,12 +51,6 @@ def repo_options_custom_product(request, module_org, module_target_sat): return options -@pytest.fixture -def env(module_org, module_target_sat): - """Create a new puppet environment.""" - return module_target_sat.api.Environment(organization=[module_org]).create() - - @pytest.fixture def repo(repo_options, module_target_sat): """Create a new repository.""" @@ -485,14 +480,13 @@ def test_negative_update_to_invalid_download_policy(self, repo, target_sat): @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', - **datafactory.parametrized( - [ - {'content_type': content_type, 'download_policy': 'on_demand'} - for content_type in constants.REPO_TYPE - if content_type != 'yum' - ] - ), + [ + {'content_type': content_type, 'download_policy': 'on_demand'} + for content_type in constants.REPO_TYPE + if content_type != 'yum' + ], indirect=True, + ids=lambda x: x['content_type'], ) def test_negative_create_non_yum_with_download_policy(self, repo_options, target_sat): """Verify that non-YUM repositories cannot be created with @@ -1531,7 +1525,7 @@ def test_positive_sync_kickstart_check_os( 1. OS with corresponding version was created. 
""" - distro = f'rhel{distro} + "_bos"' if distro > 7 else f'rhel{distro}' + distro = f'rhel{distro}_bos' if distro > 7 else f'rhel{distro}' repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', org_id=module_entitlement_manifest_org.id, @@ -1806,7 +1800,7 @@ def test_negative_synchronize_private_registry_wrong_repo(self, repo_options, re :BZ: 1475121, 1580510 """ - msg = "404, message=\'Not Found\'" + msg = "404, message='Not Found'" with pytest.raises(TaskFailedError, match=msg): repo.sync() @@ -2135,7 +2129,7 @@ class TestSRPMRepository: @pytest.mark.upgrade @pytest.mark.tier2 def test_positive_srpm_upload_publish_promote_cv( - self, module_org, env, repo, module_target_sat + self, module_org, module_lce, repo, module_target_sat ): """Upload SRPM to repository, add repository to content view and publish, promote content view @@ -2169,7 +2163,6 @@ def test_positive_srpm_upload_publish_promote_cv( @pytest.mark.upgrade @pytest.mark.tier2 - @pytest.mark.skip('Uses deprecated SRPM repository') @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @@ -2178,7 +2171,7 @@ def test_positive_srpm_upload_publish_promote_cv( **datafactory.parametrized({'fake_srpm': {'url': repo_constants.FAKE_YUM_SRPM_REPO}}), indirect=True, ) - def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo, target_sat): + def test_positive_repo_sync_publish_promote_cv(self, module_org, module_lce, repo, target_sat): """Synchronize repository with SRPMs, add repository to content view and publish, promote content view @@ -2202,8 +2195,8 @@ def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo, targ >= 3 ) - cv.version[0].promote(data={'environment_ids': env.id, 'force': False}) - assert len(target_sat.api.Srpms().search(query={'environment_id': env.id})) == 3 + cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) + assert len(target_sat.api.Srpms().search(query={'environment_id': module_lce.id})) >= 3 class TestSRPMRepositoryIgnoreContent: diff --git a/tests/foreman/api/test_repository_set.py b/tests/foreman/api/test_repository_set.py index 8f8c56ea04e..1b80b0722e4 100644 --- a/tests/foreman/api/test_repository_set.py +++ b/tests/foreman/api/test_repository_set.py @@ -14,6 +14,7 @@ :CaseImportance: High """ + import pytest from robottelo.constants import PRDS, REPOSET diff --git a/tests/foreman/api/test_rhc.py b/tests/foreman/api/test_rhc.py index 30559fe406e..fa39c4cb320 100644 --- a/tests/foreman/api/test_rhc.py +++ b/tests/foreman/api/test_rhc.py @@ -6,11 +6,12 @@ :CaseComponent: RHCloud -:Team: Platform +:Team: Phoenix-subscriptions :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/api/test_rhcloud_inventory.py b/tests/foreman/api/test_rhcloud_inventory.py index 1f7af05b92a..14cff526045 100644 --- a/tests/foreman/api/test_rhcloud_inventory.py +++ b/tests/foreman/api/test_rhcloud_inventory.py @@ -6,11 +6,12 @@ :CaseComponent: RHCloud -:Team: Platform +:Team: Phoenix-subscriptions :CaseImportance: High """ + from fauxfactory import gen_alphanumeric, gen_string import pytest diff --git a/tests/foreman/api/test_rhsm.py b/tests/foreman/api/test_rhsm.py index 096163a8719..2cf6c96562c 100644 --- a/tests/foreman/api/test_rhsm.py +++ b/tests/foreman/api/test_rhsm.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + import http from nailgun import client diff --git a/tests/foreman/api/test_role.py b/tests/foreman/api/test_role.py index 
7d5a12bc591..a9f3211fa49 100644 --- a/tests/foreman/api/test_role.py +++ b/tests/foreman/api/test_role.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + from nailgun.config import ServerConfig import pytest from requests.exceptions import HTTPError @@ -183,7 +184,7 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): ldap_user_passwd=ad_data['ldap_user_passwd'], authsource=target_sat.api.AuthSourceLDAP( onthefly_register=True, - account=fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", + account=rf"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", account_password=ad_data['ldap_user_passwd'], base_dn=ad_data['base_dn'], groups_base=ad_data['group_base_dn'], diff --git a/tests/foreman/api/test_settings.py b/tests/foreman/api/test_settings.py index 4cbb5609bd0..aaae44def82 100644 --- a/tests/foreman/api/test_settings.py +++ b/tests/foreman/api/test_settings.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random import pytest diff --git a/tests/foreman/api/test_subnet.py b/tests/foreman/api/test_subnet.py index b4882116f8e..3cc45213a39 100644 --- a/tests/foreman/api/test_subnet.py +++ b/tests/foreman/api/test_subnet.py @@ -16,6 +16,7 @@ :CaseImportance: High """ + import re import pytest @@ -336,9 +337,8 @@ def test_negative_update_parameter(new_name, target_sat): sub_param.update(['name']) -@pytest.mark.stubbed @pytest.mark.tier2 -def test_positive_update_subnet_parameter_host_impact(): +def test_positive_update_subnet_parameter_host_impact(target_sat): """Update in parameter name and value from subnet component updates the parameter in host inheriting that subnet @@ -353,12 +353,29 @@ def test_positive_update_subnet_parameter_host_impact(): :expectedresults: 1. The inherited subnet parameter in host should have - updated name and value - 2. The inherited subnet parameter in host enc should have - updated name and value + updated name and value. 
:BZ: 1470014 """ + parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] + org = target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( + location=[loc], organization=[org], subnet_parameters_attributes=parameter + ).create() + assert parameter[0]['name'] == org_subnet.subnet_parameters_attributes[0]['name'] + assert parameter[0]['value'] == org_subnet.subnet_parameters_attributes[0]['value'] + host = target_sat.api.Host(location=loc, organization=org, subnet=org_subnet).create() + parameter_new_value = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] + org_subnet.subnet_parameters_attributes = parameter_new_value + org_subnet.update(['subnet_parameters_attributes']) + assert ( + host.subnet.read().subnet_parameters_attributes[0]['name'] == parameter_new_value[0]['name'] + ) + assert ( + host.subnet.read().subnet_parameters_attributes[0]['value'] + == parameter_new_value[0]['value'] + ) @pytest.mark.tier1 diff --git a/tests/foreman/api/test_subscription.py b/tests/foreman/api/test_subscription.py index 282dac528b0..019815bba22 100644 --- a/tests/foreman/api/test_subscription.py +++ b/tests/foreman/api/test_subscription.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun.config import ServerConfig from nailgun.entity_mixins import TaskFailedError diff --git a/tests/foreman/api/test_syncplan.py b/tests/foreman/api/test_syncplan.py index 3ecbfac8714..95f26664827 100644 --- a/tests/foreman/api/test_syncplan.py +++ b/tests/foreman/api/test_syncplan.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + from datetime import datetime, timedelta from time import sleep diff --git a/tests/foreman/api/test_template_combination.py b/tests/foreman/api/test_template_combination.py index 7e9d5f11be8..8575d2ee3c2 100644 --- a/tests/foreman/api/test_template_combination.py +++ b/tests/foreman/api/test_template_combination.py @@ -9,6 +9,7 @@ :Team: Rocket """ + import pytest from requests.exceptions import HTTPError diff --git a/tests/foreman/api/test_templatesync.py b/tests/foreman/api/test_templatesync.py index bda401ef373..c19a932cece 100644 --- a/tests/foreman/api/test_templatesync.py +++ b/tests/foreman/api/test_templatesync.py @@ -9,6 +9,7 @@ :Team: Endeavour """ + import base64 import json import time diff --git a/tests/foreman/api/test_user.py b/tests/foreman/api/test_user.py index 75dd19e6f55..7c913ae3f43 100644 --- a/tests/foreman/api/test_user.py +++ b/tests/foreman/api/test_user.py @@ -15,6 +15,7 @@ :CaseImportance: High """ + import json import re @@ -667,7 +668,7 @@ def create_ldap(self, ad_data, module_target_sat): ldap_user_passwd=ad_data['ldap_user_passwd'], authsource=module_target_sat.api.AuthSourceLDAP( onthefly_register=True, - account=fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", + account=rf"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", account_password=ad_data['ldap_user_passwd'], base_dn=ad_data['base_dn'], groups_base=ad_data['group_base_dn'], diff --git a/tests/foreman/api/test_usergroup.py b/tests/foreman/api/test_usergroup.py index 95d1547934f..8c51419d92d 100644 --- a/tests/foreman/api/test_usergroup.py +++ b/tests/foreman/api/test_usergroup.py @@ -14,6 +14,7 @@ :CaseImportance: High """ + from random import randint from fauxfactory import gen_string diff --git a/tests/foreman/api/test_webhook.py b/tests/foreman/api/test_webhook.py index bb6c3637c2b..d8741bab4a6 100644 --- a/tests/foreman/api/test_webhook.py 
+++ b/tests/foreman/api/test_webhook.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import re import pytest diff --git a/tests/foreman/cli/test_abrt.py b/tests/foreman/cli/test_abrt.py index 0543679b733..2c223476f8f 100644 --- a/tests/foreman/cli/test_abrt.py +++ b/tests/foreman/cli/test_abrt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest pytestmark = [pytest.mark.stubbed] diff --git a/tests/foreman/cli/test_acs.py b/tests/foreman/cli/test_acs.py index f7ad33cf204..50c82d69443 100644 --- a/tests/foreman/cli/test_acs.py +++ b/tests/foreman/cli/test_acs.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_alphanumeric import pytest diff --git a/tests/foreman/cli/test_activationkey.py b/tests/foreman/cli/test_activationkey.py index cbcfaea5700..3f90806a9d4 100644 --- a/tests/foreman/cli/test_activationkey.py +++ b/tests/foreman/cli/test_activationkey.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import choice import re diff --git a/tests/foreman/cli/test_ansible.py b/tests/foreman/cli/test_ansible.py index 8cffe15b20d..3bc5872dacd 100644 --- a/tests/foreman/cli/test_ansible.py +++ b/tests/foreman/cli/test_ansible.py @@ -4,156 +4,521 @@ :CaseAutomation: Automated -:CaseComponent: Ansible-ConfigurationManagement - :Team: Rocket :CaseImportance: High - """ + +from time import sleep + from fauxfactory import gen_string import pytest from robottelo.config import settings -@pytest.mark.e2e -@pytest.mark.no_containers -@pytest.mark.rhel_ver_match('[^6].*') -def test_positive_ansible_e2e(target_sat, module_org, rhel_contenthost): +def assert_job_invocation_result( + sat, invocation_command_id, client_hostname, expected_result='success' +): + """Asserts the job invocation finished with the expected result and fetches job output + when error occurs. Result is one of: success, pending, error, warning""" + result = sat.cli.JobInvocation.info({'id': invocation_command_id}) + try: + assert result[expected_result] == '1' + except AssertionError as err: + raise AssertionError( + 'host output: {}'.format( + ' '.join( + sat.cli.JobInvocation.get_output( + {'id': invocation_command_id, 'host': client_hostname} + ) + ) + ) + ) from err + + +@pytest.mark.upgrade +class TestAnsibleCfgMgmt: + """Test class for Configuration Management with Ansible + + :CaseComponent: Ansible-ConfigurationManagement """ - Test successful execution of Ansible Job on host. - :id: 0c52bc63-a41a-4f48-a980-fe49b4ecdbdc + @pytest.mark.e2e + @pytest.mark.no_containers + @pytest.mark.rhel_ver_match('[^6].*') + def test_positive_ansible_e2e( + self, target_sat, module_sca_manifest_org, module_ak_with_cv, rhel_contenthost + ): + """ + Test successful execution of Ansible Job on host. - :steps: - 1. Register a content host with satellite - 2. Import a role into satellite - 3. Assign that role to a host - 4. Assert that the role and variable were assigned to the host successfully - 5. Run the Ansible playbook associated with that role - 6. Check if the job is executed successfully. - 7. Disassociate the Role from the host. - 8. Delete the assigned ansible role + :id: 0c52bc63-a41a-4f48-a980-fe49b4ecdbdc - :expectedresults: - 1. Host should be assigned the proper role. - 2. Job execution must be successful. - 3. Operations performed with hammer must be successful. + :steps: + 1. Register a content host with satellite + 2. Import a role into satellite + 3. Assign that role to a host + 4. Assert that the role and variable were assigned to the host successfully + 5. 
Run the Ansible playbook associated with that role + 6. Check if the job is executed successfully. + 7. Disassociate the Role from the host. + 8. Delete the assigned ansible role - :BZ: 2154184 + :expectedresults: + 1. Host should be assigned the proper role. + 2. Job execution must be successful. + 3. Operations performed with hammer must be successful. - :customerscenario: true + :BZ: 2154184 - :CaseImportance: Critical - """ - SELECTED_ROLE = 'RedHatInsights.insights-client' - SELECTED_ROLE_1 = 'theforeman.foreman_scap_client' - SELECTED_VAR = gen_string('alpha') - # disable batch tasks to test BZ#2154184 - target_sat.cli.Settings.set({'name': 'foreman_tasks_proxy_batch_trigger', 'value': 'false'}) - if rhel_contenthost.os_version.major <= 7: - rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) - assert rhel_contenthost.execute('yum install -y insights-client').status == 0 - rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(module_org.label, force=True) - assert rhel_contenthost.subscribed - rhel_contenthost.add_rex_key(satellite=target_sat) - proxy_id = target_sat.nailgun_smart_proxy.id - target_host = rhel_contenthost.nailgun_host - - target_sat.cli.Ansible.roles_sync( - {'role-names': f'{SELECTED_ROLE},{SELECTED_ROLE_1}', 'proxy-id': proxy_id} - ) + :customerscenario: true + """ + SELECTED_ROLE = 'RedHatInsights.insights-client' + SELECTED_ROLE_1 = 'theforeman.foreman_scap_client' + SELECTED_VAR = gen_string('alpha') + proxy_id = target_sat.nailgun_smart_proxy.id + # disable batch tasks to test BZ#2154184 + target_sat.cli.Settings.set({'name': 'foreman_tasks_proxy_batch_trigger', 'value': 'false'}) + result = rhel_contenthost.register( + module_sca_manifest_org, None, module_ak_with_cv.name, target_sat + ) + assert result.status == 0, f'Failed to register host: {result.stderr}' + if rhel_contenthost.os_version.major <= 7: + rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) + assert rhel_contenthost.execute('yum install -y insights-client').status == 0 + target_host = rhel_contenthost.nailgun_host - result = target_sat.cli.Host.ansible_roles_add( - {'id': target_host.id, 'ansible-role': SELECTED_ROLE} - ) - assert 'Ansible role has been associated.' in result[0]['message'] + target_sat.cli.Ansible.roles_sync( + {'role-names': f'{SELECTED_ROLE},{SELECTED_ROLE_1}', 'proxy-id': proxy_id} + ) + result = target_sat.cli.Host.ansible_roles_add( + {'id': target_host.id, 'ansible-role': SELECTED_ROLE} + ) + assert 'Ansible role has been associated.' 
in result[0]['message'] - target_sat.cli.Ansible.variables_create( - {'variable': SELECTED_VAR, 'ansible-role': SELECTED_ROLE} - ) + target_sat.cli.Ansible.variables_create( + {'variable': SELECTED_VAR, 'ansible-role': SELECTED_ROLE} + ) - assert SELECTED_ROLE, ( - SELECTED_VAR in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout - ) - template_id = ( - target_sat.api.JobTemplate() - .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] - .id - ) - job = target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'targeting_type': 'static_query', - 'search_query': f'name = {rhel_contenthost.hostname}', - }, - ) - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 - ) - result = target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 1 + assert SELECTED_ROLE, ( + SELECTED_VAR in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout + ) + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id + ) + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {rhel_contenthost.hostname}', + }, + ) + target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 + ) + result = target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 1 - result = target_sat.cli.Host.ansible_roles_assign( - {'id': target_host.id, 'ansible-roles': f'{SELECTED_ROLE},{SELECTED_ROLE_1}'} - ) - assert 'Ansible roles were assigned to the host' in result[0]['message'] + result = target_sat.cli.Host.ansible_roles_assign( + {'id': target_host.id, 'ansible-roles': f'{SELECTED_ROLE},{SELECTED_ROLE_1}'} + ) + assert 'Ansible roles were assigned to the host' in result[0]['message'] - result = target_sat.cli.Host.ansible_roles_remove( - {'id': target_host.id, 'ansible-role': SELECTED_ROLE} - ) - assert 'Ansible role has been disassociated.' in result[0]['message'] + result = target_sat.cli.Host.ansible_roles_remove( + {'id': target_host.id, 'ansible-role': SELECTED_ROLE} + ) + assert 'Ansible role has been disassociated.' in result[0]['message'] - result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) - assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] + result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) + assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] - assert SELECTED_ROLE, ( - SELECTED_VAR not in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout - ) + assert SELECTED_ROLE, ( + SELECTED_VAR not in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout + ) + @pytest.mark.e2e + @pytest.mark.tier2 + def test_add_and_remove_ansible_role_hostgroup(self, target_sat): + """ + Test add and remove functionality for ansible roles in hostgroup via CLI -@pytest.mark.e2e -@pytest.mark.tier2 -def test_add_and_remove_ansible_role_hostgroup(target_sat): - """ - Test add and remove functionality for ansible roles in hostgroup via CLI + :id: 2c6fda14-4cd2-490a-b7ef-7a08f8164fad + + :customerscenario: true + + :steps: + 1. Create a hostgroup + 2. Sync few ansible roles + 3. Assign a few ansible roles with the host group + 4. Add some ansible role with the host group + 5. 
Remove the added ansible roles from the host group - :id: 2c6fda14-4cd2-490a-b7ef-7a08f8164fad + :expectedresults: + 1. Ansible role assign/add/remove functionality should work as expected in CLI - :customerscenario: true + :BZ: 2029402 + """ + ROLES = [ + 'theforeman.foreman_scap_client', + 'redhat.satellite.hostgroups', + 'RedHatInsights.insights-client', + ] + proxy_id = target_sat.nailgun_smart_proxy.id + hg_name = gen_string('alpha') + result = target_sat.cli.HostGroup.create({'name': hg_name}) + assert result['name'] == hg_name + target_sat.cli.Ansible.roles_sync({'role-names': ROLES, 'proxy-id': proxy_id}) + result = target_sat.cli.HostGroup.ansible_roles_assign( + {'name': hg_name, 'ansible-roles': f'{ROLES[1]},{ROLES[2]}'} + ) + assert 'Ansible roles were assigned to the hostgroup' in result[0]['message'] + result = target_sat.cli.HostGroup.ansible_roles_add( + {'name': hg_name, 'ansible-role': ROLES[0]} + ) + assert 'Ansible role has been associated.' in result[0]['message'] + result = target_sat.cli.HostGroup.ansible_roles_remove( + {'name': hg_name, 'ansible-role': ROLES[0]} + ) + assert 'Ansible role has been disassociated.' in result[0]['message'] - :steps: - 1. Create a hostgroup - 2. Sync few ansible roles - 3. Assign a few ansible roles with the host group - 4. Add some ansible role with the host group - 5. Remove the added ansible roles from the host group - :expectedresults: - 1. Ansible role assign/add/remove functionality should work as expected in CLI +@pytest.mark.tier3 +@pytest.mark.upgrade +class TestAnsibleREX: + """Test class for remote execution via Ansible - :BZ: 2029402 + :CaseComponent: Ansible-RemoteExecution """ - ROLES = [ - 'theforeman.foreman_scap_client', - 'redhat.satellite.hostgroups', - 'RedHatInsights.insights-client', - ] - proxy_id = target_sat.nailgun_smart_proxy.id - hg_name = gen_string('alpha') - result = target_sat.cli.HostGroup.create({'name': hg_name}) - assert result['name'] == hg_name - target_sat.cli.Ansible.roles_sync({'role-names': ROLES, 'proxy-id': proxy_id}) - result = target_sat.cli.HostGroup.ansible_roles_assign( - {'name': hg_name, 'ansible-roles': f'{ROLES[1]},{ROLES[2]}'} - ) - assert 'Ansible roles were assigned to the hostgroup' in result[0]['message'] - result = target_sat.cli.HostGroup.ansible_roles_add({'name': hg_name, 'ansible-role': ROLES[0]}) - assert 'Ansible role has been associated.' in result[0]['message'] - result = target_sat.cli.HostGroup.ansible_roles_remove( - {'name': hg_name, 'ansible-role': ROLES[0]} + + @pytest.mark.pit_client + @pytest.mark.pit_server + @pytest.mark.rhel_ver_match('[^6]') + def test_positive_run_effective_user_job(self, rex_contenthost, target_sat): + """Tests Ansible REX job having effective user runs successfully + + :id: a5fa20d8-c2bd-4bbf-a6dc-bf307b59dd8c + + :steps: + 0. Create a VM and register to SAT and prepare for REX (ssh key) + 1. Run Ansible Command job for the host to create a user + 2. Run Ansible Command job using effective user + 3. 
Check the job result at the host is done under that user + + :expectedresults: multiple asserts along the code + + :parametrized: yes + """ + client = rex_contenthost + # create a user on client via remote job + username = gen_string('alpha') + filename = gen_string('alpha') + make_user_job = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': f'command=useradd -m {username}', + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, make_user_job['id'], client.hostname) + # create a file as new user + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': f'command=touch /home/{username}/{filename}', + 'search-query': f'name ~ {client.hostname}', + 'effective-user': username, + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + + # check the file owner + result = client.execute( + f'''stat -c '%U' /home/{username}/{filename}''', + ) + # assert the file is owned by the effective user + assert username == result.stdout.strip('\n'), 'file ownership mismatch' + + @pytest.mark.rhel_ver_list([8]) + def test_positive_run_reccuring_job(self, rex_contenthost, target_sat): + """Tests Ansible REX reccuring job runs successfully multiple times + + :id: 49b0d31d-58f9-47f1-aa5d-561a1dcb0d66 + + :setup: + 1. Create a VM, register to SAT and configure REX (ssh-key) + + :steps: + 1. Run recurring Ansible Command job for the host + 2. Check the multiple job results at the host + + :expectedresults: multiple asserts along the code + + :bz: 2129432 + + :customerscenario: true + + :parametrized: yes + """ + client = rex_contenthost + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': 'command=ls', + 'search-query': f'name ~ {client.hostname}', + 'cron-line': '* * * * *', # every minute + 'max-iteration': 2, # just two runs + } + ) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + sleep(150) + rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) + assert rec_logic['state'] == 'finished' + assert rec_logic['iteration'] == '2' + # 2129432 + rec_logic_keys = rec_logic.keys() + assert 'action' in rec_logic_keys + assert 'last-occurrence' in rec_logic_keys + assert 'next-occurrence' in rec_logic_keys + assert 'state' in rec_logic_keys + assert 'purpose' in rec_logic_keys + assert 'iteration' in rec_logic_keys + assert 'iteration-limit' in rec_logic_keys + + @pytest.mark.rhel_ver_list([8]) + def test_positive_run_concurrent_jobs(self, rex_contenthosts, target_sat): + """Tests Ansible REX concurent jobs without batch trigger + + :id: ad0f108c-03f2-49c7-8732-b1056570567b + + :steps: + 1. Create 2 hosts, disable foreman_tasks_proxy_batch_trigger + 2. 
Run Ansible Command job with concurrency-setting + + :expectedresults: multiple asserts along the code + + :BZ: 1817320 + + :customerscenario: true + + :parametrized: yes + """ + clients = rex_contenthosts + param_name = 'foreman_tasks_proxy_batch_trigger' + target_sat.cli.GlobalParameter().set({'name': param_name, 'value': 'false'}) + output_msgs = [] + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': 'command=ls', + 'search-query': f'name ~ {clients[0].hostname} or name ~ {clients[1].hostname}', + 'concurrency-level': 2, + } + ) + for vm in clients: + output_msgs.append( + 'host output from {}: {}'.format( + vm.hostname, + ' '.join( + target_sat.cli.JobInvocation.get_output( + {'id': invocation_command['id'], 'host': vm.hostname} + ) + ), + ) + ) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + assert result['success'] == '2', output_msgs + target_sat.cli.GlobalParameter().delete({'name': param_name}) + assert len(target_sat.cli.GlobalParameter().list({'search': param_name})) == 0 + + @pytest.mark.rhel_ver_list([8]) + def test_positive_run_serial(self, rex_contenthosts, target_sat): + """Tests subtasks in a job run one by one when concurrency level set to 1 + + :id: 5ce39447-82d0-42df-81be-16ed3d67a2a4 + + :setup: + 1. Create 2 hosts, register to SAT and configure REX (ssh-key) + + :steps: + 1. Run a bash command job with concurrency level 1 + + :expectedresults: First subtask should run immediately, second one after the first one finishes + + :parametrized: yes + """ + hosts = rex_contenthosts + output_msgs = [] + template_file = f'/root/{gen_string("alpha")}.template' + target_sat.execute( + f"echo 'rm /root/test-<%= @host %>; echo $(date +%s) >> /root/test-<%= @host %>; sleep 120; echo $(date +%s) >> /root/test-<%= @host %>' > {template_file}" + ) + template = target_sat.cli.JobTemplate.create( + { + 'name': gen_string('alpha'), + 'file': template_file, + 'job-category': 'Commands', + 'provider-type': 'script', + } + ) + invocation = target_sat.cli_factory.job_invocation( + { + 'job-template': template['name'], + 'search-query': f'name ~ {hosts[0].hostname} or name ~ {hosts[1].hostname}', + 'concurrency-level': 1, + } + ) + for vm in hosts: + output_msgs.append( + 'host output from {}: {}'.format( + vm.hostname, + ' '.join( + target_sat.cli.JobInvocation.get_output( + {'id': invocation['id'], 'host': vm.hostname} + ) + ), + ) + ) + result = target_sat.cli.JobInvocation.info({'id': invocation['id']}) + assert result['success'] == '2', output_msgs + # assert for time diffs + file1 = hosts[0].execute('cat /root/test-$(hostname)').stdout + file2 = hosts[1].execute('cat /root/test-$(hostname)').stdout + file1_start, file1_end = map(int, file1.rstrip().split('\n')) + file2_start, file2_end = map(int, file2.rstrip().split('\n')) + if file1_start > file2_start: + file1_start, file1_end, file2_start, file2_end = ( + file2_start, + file2_end, + file1_start, + file1_end, + ) + assert file1_end - file1_start >= 120 + assert file2_end - file2_start >= 120 + assert file2_start >= file1_end # the jobs did NOT run concurrently + + @pytest.mark.e2e + @pytest.mark.no_containers + @pytest.mark.pit_server + @pytest.mark.rhel_ver_match('[^6].*') + @pytest.mark.skipif( + (not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url' ) - assert 'Ansible role has been disassociated.' 
in result[0]['message'] + def test_positive_run_packages_and_services_job( + self, rhel_contenthost, module_sca_manifest_org, module_ak_with_cv, target_sat + ): + """Tests Ansible REX job can install packages and start services + + :id: 47ed82fb-77ca-43d6-a52e-f62bae5d3a42 + + :setup: + 1. Create a VM, register to SAT and configure REX (ssh-key) + + :steps: + 1. Run Ansible Package job for the host to install a package + 2. Check the package is present at the host + 3. Run Ansible Service job for the host to start a service + 4. Check the service is started on the host + + :expectedresults: multiple asserts along the code + + :bz: 1872688, 1811166 + + :customerscenario: true + + :parametrized: yes + """ + client = rhel_contenthost + packages = ['tapir'] + result = client.register( + module_sca_manifest_org, + None, + module_ak_with_cv.name, + target_sat, + repo=settings.repos.yum_3.url, + ) + assert result.status == 0, f'Failed to register host: {result.stderr}' + # install package + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Package Action - Ansible Default', + 'inputs': 'state=latest, name={}'.format(*packages), + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + result = client.run(f'rpm -q {" ".join(packages)}') + assert result.status == 0 + + # stop a service + service = 'rsyslog' + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Service Action - Ansible Default', + 'inputs': f'state=stopped, name={service}', + 'search-query': f"name ~ {client.hostname}", + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + result = client.execute(f'systemctl status {service}') + assert result.status == 3 + + # start it again + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Service Action - Ansible Default', + 'inputs': f'state=started, name={service}', + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + result = client.execute(f'systemctl status {service}') + assert result.status == 0 + + @pytest.mark.rhel_ver_list([8]) + def test_positive_install_ansible_collection(self, rex_contenthost, target_sat): + """Test whether Ansible collection can be installed via Ansible REX + + :id: ad25aee5-4ea3-4743-a301-1c6271856f79 + + :steps: + 1. Upload a manifest. + 2. Register content host to Satellite with REX setup + 3. Enable Ansible repo on content host. + 4. Install ansible or ansible-core package + 5. Run REX job to install Ansible collection on content host. + + :expectedresults: Ansible collection can be installed on content host via REX. 
+ """ + client = rex_contenthost + # Enable Ansible repository and Install ansible or ansible-core package + client.create_custom_repos(rhel8_aps=settings.repos.rhel8_os.appstream) + assert client.execute('dnf -y install ansible-core').status == 0 + + collection_job = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Ansible Collection - Install from Galaxy', + 'inputs': 'ansible_collections_list="oasis_roles.system"', + 'search-query': f'name ~ {client.hostname}', + } + ) + result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) + assert result['success'] == '1' + collection_path = client.execute('ls /etc/ansible/collections/ansible_collections').stdout + assert 'oasis_roles' in collection_path + + # Extend test with custom collections_path advanced input field + collection_job = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Ansible Collection - Install from Galaxy', + 'inputs': 'ansible_collections_list="oasis_roles.system", collections_path="~/"', + 'search-query': f'name ~ {client.hostname}', + } + ) + result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) + assert result['success'] == '1' + collection_path = client.execute('ls ~/ansible_collections').stdout + assert 'oasis_roles' in collection_path diff --git a/tests/foreman/cli/test_architecture.py b/tests/foreman/cli/test_architecture.py index 7578fe577b6..51959e2a9a8 100644 --- a/tests/foreman/cli/test_architecture.py +++ b/tests/foreman/cli/test_architecture.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_choice import pytest diff --git a/tests/foreman/cli/test_auth.py b/tests/foreman/cli/test_auth.py index 59aafee0be5..4136ffb034e 100644 --- a/tests/foreman/cli/test_auth.py +++ b/tests/foreman/cli/test_auth.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from time import sleep from fauxfactory import gen_string @@ -29,9 +30,7 @@ def configure_sessions(satellite, enable=True, add_default_creds=False): """Enables the `use_sessions` option in hammer config""" result = satellite.execute( '''sed -i -e '/username/d;/password/d;/use_sessions/d' {0};\ - echo ' :use_sessions: {1}' >> {0}'''.format( - HAMMER_CONFIG, 'true' if enable else 'false' - ) + echo ' :use_sessions: {1}' >> {0}'''.format(HAMMER_CONFIG, 'true' if enable else 'false') ) if result.status == 0 and add_default_creds: result = satellite.execute( diff --git a/tests/foreman/cli/test_bootdisk.py b/tests/foreman/cli/test_bootdisk.py index e6515e1af9f..9a2e1e4a987 100644 --- a/tests/foreman/cli/test_bootdisk.py +++ b/tests/foreman/cli/test_bootdisk.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_mac, gen_string import pytest @@ -20,6 +21,7 @@ @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True) def test_positive_bootdisk_download_https( + request, module_location, module_sync_kickstart_content, module_provisioning_capsule, @@ -79,8 +81,12 @@ def test_positive_bootdisk_download_https( 'lifecycle-environment-id': module_lce_library.id, } ) + + @request.addfinalizer + def _finalize(): + module_target_sat.api.Host(id=host.id).delete() + module_target_sat.api.Media(id=media['id']).delete() + # Check if full-host bootdisk can be downloaded. 
bootdisk = module_target_sat.cli.Bootdisk.host({'host-id': host['id'], 'full': 'true'}) assert 'Successfully downloaded host disk image' in bootdisk['message'] - module_target_sat.api.Host(id=host.id).delete() - module_target_sat.api.Media(id=media['id']).delete() diff --git a/tests/foreman/cli/test_bootstrap_script.py b/tests/foreman/cli/test_bootstrap_script.py index f4b71c24bd1..65872032d1d 100644 --- a/tests/foreman/cli/test_bootstrap_script.py +++ b/tests/foreman/cli/test_bootstrap_script.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest diff --git a/tests/foreman/cli/test_capsule.py b/tests/foreman/cli/test_capsule.py index ddcc100326a..3f7e98b104d 100644 --- a/tests/foreman/cli/test_capsule.py +++ b/tests/foreman/cli/test_capsule.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest pytestmark = [pytest.mark.run_in_one_thread] diff --git a/tests/foreman/cli/test_classparameters.py b/tests/foreman/cli/test_classparameters.py index 022ceed0aac..f6b0ca7a4ee 100644 --- a/tests/foreman/cli/test_classparameters.py +++ b/tests/foreman/cli/test_classparameters.py @@ -11,6 +11,7 @@ :Team: Rocket """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/cli/test_computeresource_azurerm.py b/tests/foreman/cli/test_computeresource_azurerm.py index 1756a0ed674..1c64a55c5bc 100644 --- a/tests/foreman/cli/test_computeresource_azurerm.py +++ b/tests/foreman/cli/test_computeresource_azurerm.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest @@ -343,8 +344,9 @@ def class_host_ft( Provisions the host on AzureRM using Finish template Later in tests this host will be used to perform assertions """ - with sat_azure.hammer_api_timeout(), sat_azure.skip_yum_update_during_provisioning( - template='Kickstart default finish' + with ( + sat_azure.hammer_api_timeout(), + sat_azure.skip_yum_update_during_provisioning(template='Kickstart default finish'), ): host = sat_azure.cli.Host.create( { @@ -472,8 +474,9 @@ def class_host_ud( Provisions the host on AzureRM using UserData template Later in tests this host will be used to perform assertions """ - with sat_azure.hammer_api_timeout(), sat_azure.skip_yum_update_during_provisioning( - template='Kickstart default user data' + with ( + sat_azure.hammer_api_timeout(), + sat_azure.skip_yum_update_during_provisioning(template='Kickstart default user data'), ): host = sat_azure.cli.Host.create( { diff --git a/tests/foreman/cli/test_computeresource_ec2.py b/tests/foreman/cli/test_computeresource_ec2.py index e4e1e669e6b..36c128bdee0 100644 --- a/tests/foreman/cli/test_computeresource_ec2.py +++ b/tests/foreman/cli/test_computeresource_ec2.py @@ -8,6 +8,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/cli/test_computeresource_libvirt.py b/tests/foreman/cli/test_computeresource_libvirt.py index 2a4720750a0..656a8ed32ca 100644 --- a/tests/foreman/cli/test_computeresource_libvirt.py +++ b/tests/foreman/cli/test_computeresource_libvirt.py @@ -28,6 +28,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_string, gen_url diff --git a/tests/foreman/cli/test_computeresource_osp.py b/tests/foreman/cli/test_computeresource_osp.py index ff6779056ae..1b80cae877f 100644 --- a/tests/foreman/cli/test_computeresource_osp.py +++ b/tests/foreman/cli/test_computeresource_osp.py @@ -10,6 +10,7 @@ :CaseImportance: High """ + from box import Box from fauxfactory import gen_string import pytest diff --git 
a/tests/foreman/cli/test_computeresource_rhev.py b/tests/foreman/cli/test_computeresource_rhev.py index 173b9c1c414..9e93125eb6b 100644 --- a/tests/foreman/cli/test_computeresource_rhev.py +++ b/tests/foreman/cli/test_computeresource_rhev.py @@ -10,6 +10,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest from wait_for import wait_for diff --git a/tests/foreman/cli/test_computeresource_vmware.py b/tests/foreman/cli/test_computeresource_vmware.py index b72a5fca5c5..da69418eecd 100644 --- a/tests/foreman/cli/test_computeresource_vmware.py +++ b/tests/foreman/cli/test_computeresource_vmware.py @@ -10,10 +10,10 @@ :CaseAutomation: Automated """ + from fauxfactory import gen_string import pytest from wait_for import wait_for -from wrapanapi import VMWareSystem from robottelo.config import settings from robottelo.constants import FOREMAN_PROVIDERS @@ -93,6 +93,7 @@ def test_positive_provision_end_to_end( module_vmware_hostgroup, provision_method, vmware, + vmwareclient, ): """Provision a host on vmware compute resource with the help of hostgroup. @@ -139,12 +140,7 @@ def test_positive_provision_end_to_end( hostname = f'{hostname}.{module_provisioning_sat.domain.name}' assert hostname == host['name'] # check if vm is created on vmware - vmware = VMWareSystem( - hostname=vmware.hostname, - username=settings.vmware.username, - password=settings.vmware.password, - ) - assert vmware.does_vm_exist(hostname) is True + assert vmwareclient.does_vm_exist(hostname) is True wait_for( lambda: sat.cli.Host.info({'name': hostname})['status']['build-status'] != 'Pending installation', diff --git a/tests/foreman/cli/test_container_management.py b/tests/foreman/cli/test_container_management.py index 17d0b0b4bd0..871993090ed 100644 --- a/tests/foreman/cli/test_container_management.py +++ b/tests/foreman/cli/test_container_management.py @@ -9,6 +9,7 @@ :CaseComponent: ContainerManagement-Content """ + from fauxfactory import gen_string import pytest from wait_for import wait_for diff --git a/tests/foreman/cli/test_contentaccess.py b/tests/foreman/cli/test_contentaccess.py index 0c59e5c70e6..5eefd3af5b0 100644 --- a/tests/foreman/cli/test_contentaccess.py +++ b/tests/foreman/cli/test_contentaccess.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import time from nailgun import entities diff --git a/tests/foreman/cli/test_contentcredentials.py b/tests/foreman/cli/test_contentcredentials.py index 208812cb857..14d4ff50718 100644 --- a/tests/foreman/cli/test_contentcredentials.py +++ b/tests/foreman/cli/test_contentcredentials.py @@ -13,6 +13,7 @@ :CaseImportance: High """ + from tempfile import mkstemp from fauxfactory import gen_alphanumeric, gen_choice, gen_integer, gen_string diff --git a/tests/foreman/cli/test_contentview.py b/tests/foreman/cli/test_contentview.py index 9b261f57218..f9b6c62221d 100644 --- a/tests/foreman/cli/test_contentview.py +++ b/tests/foreman/cli/test_contentview.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_alphanumeric, gen_string @@ -3798,7 +3799,7 @@ def test_negative_user_with_no_create_view_cv_permissions(self, module_org, modu password = gen_alphanumeric() no_rights_user = module_target_sat.cli_factory.user({'password': password}) no_rights_user['password'] = password - org_id = module_target_sat.cli_factory.make_org(cached=True)['id'] + org_id = module_target_sat.cli_factory.make_org()['id'] for name in generate_strings_list(exclude_types=['cjk']): # test that user can't create with 
pytest.raises(CLIReturnCodeError): diff --git a/tests/foreman/cli/test_contentviewfilter.py b/tests/foreman/cli/test_contentviewfilter.py index 65ffeb940a5..0a198b2754c 100644 --- a/tests/foreman/cli/test_contentviewfilter.py +++ b/tests/foreman/cli/test_contentviewfilter.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_string diff --git a/tests/foreman/cli/test_discoveredhost.py b/tests/foreman/cli/test_discoveredhost.py index e578f35e1ac..4c81335e2af 100644 --- a/tests/foreman/cli/test_discoveredhost.py +++ b/tests/foreman/cli/test_discoveredhost.py @@ -9,6 +9,7 @@ :Team: Rocket """ + import pytest from wait_for import wait_for diff --git a/tests/foreman/cli/test_discoveryrule.py b/tests/foreman/cli/test_discoveryrule.py index 2d9059f196b..933b68538aa 100644 --- a/tests/foreman/cli/test_discoveryrule.py +++ b/tests/foreman/cli/test_discoveryrule.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from functools import partial import random diff --git a/tests/foreman/cli/test_docker.py b/tests/foreman/cli/test_docker.py index 10b1d1dd4fc..30f96d9212b 100644 --- a/tests/foreman/cli/test_docker.py +++ b/tests/foreman/cli/test_docker.py @@ -7,6 +7,7 @@ :CaseImportance: High """ + from random import choice, randint from fauxfactory import gen_string, gen_url diff --git a/tests/foreman/cli/test_domain.py b/tests/foreman/cli/test_domain.py index 32f81369592..d77829e3039 100644 --- a/tests/foreman/cli/test_domain.py +++ b/tests/foreman/cli/test_domain.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/cli/test_environment.py b/tests/foreman/cli/test_environment.py index 90e5b79e72a..422030f6af4 100644 --- a/tests/foreman/cli/test_environment.py +++ b/tests/foreman/cli/test_environment.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from random import choice from fauxfactory import gen_alphanumeric, gen_string diff --git a/tests/foreman/cli/test_errata.py b/tests/foreman/cli/test_errata.py index 52eff37bda4..523bb545967 100644 --- a/tests/foreman/cli/test_errata.py +++ b/tests/foreman/cli/test_errata.py @@ -10,6 +10,7 @@ :CaseImportance: High """ + from datetime import date, datetime, timedelta from operator import itemgetter diff --git a/tests/foreman/cli/test_fact.py b/tests/foreman/cli/test_fact.py index ddaaa477944..7913d68287c 100644 --- a/tests/foreman/cli/test_fact.py +++ b/tests/foreman/cli/test_fact.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/cli/test_filter.py b/tests/foreman/cli/test_filter.py index 4108ce07d6b..3f05953d6ce 100644 --- a/tests/foreman/cli/test_filter.py +++ b/tests/foreman/cli/test_filter.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.exceptions import CLIReturnCodeError diff --git a/tests/foreman/cli/test_foremantask.py b/tests/foreman/cli/test_foremantask.py index bdce6e63a97..da49b88a289 100644 --- a/tests/foreman/cli/test_foremantask.py +++ b/tests/foreman/cli/test_foremantask.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest diff --git a/tests/foreman/cli/test_globalparam.py b/tests/foreman/cli/test_globalparam.py index 41bb8ca769b..6c1eaed9c07 100644 --- a/tests/foreman/cli/test_globalparam.py +++ b/tests/foreman/cli/test_globalparam.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from functools import partial from fauxfactory import gen_string diff --git a/tests/foreman/cli/test_hammer.py 
b/tests/foreman/cli/test_hammer.py index aa3bfcf4aeb..bc78daa88c0 100644 --- a/tests/foreman/cli/test_hammer.py +++ b/tests/foreman/cli/test_hammer.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import io import json import re diff --git a/tests/foreman/cli/test_host.py b/tests/foreman/cli/test_host.py index 37864e0f763..7a88ea9c461 100644 --- a/tests/foreman/cli/test_host.py +++ b/tests/foreman/cli/test_host.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import choice import re @@ -1552,6 +1553,7 @@ def yum_security_plugin(katello_host_tools_host): @pytest.mark.e2e @pytest.mark.cli_katello_host_tools +@pytest.mark.rhel_ver_match('[^6].*') @pytest.mark.tier3 def test_positive_report_package_installed_removed( katello_host_tools_host, setup_custom_repo, target_sat @@ -1597,6 +1599,7 @@ def test_positive_report_package_installed_removed( @pytest.mark.cli_katello_host_tools +@pytest.mark.rhel_ver_match('[^6].*') @pytest.mark.tier3 def test_positive_package_applicability(katello_host_tools_host, setup_custom_repo, target_sat): """Ensure packages applicability is functioning properly @@ -1658,6 +1661,7 @@ def test_positive_package_applicability(katello_host_tools_host, setup_custom_re @pytest.mark.e2e @pytest.mark.cli_katello_host_tools +@pytest.mark.rhel_ver_match('[^6].*') @pytest.mark.pit_client @pytest.mark.pit_server @pytest.mark.tier3 @@ -1727,6 +1731,7 @@ def test_positive_erratum_applicability( @pytest.mark.cli_katello_host_tools +@pytest.mark.rhel_ver_match('[^6].*') @pytest.mark.tier3 def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_repo, target_sat): """Apply security erratum to a host diff --git a/tests/foreman/cli/test_hostcollection.py b/tests/foreman/cli/test_hostcollection.py index ce3426030b5..e01238e440b 100644 --- a/tests/foreman/cli/test_hostcollection.py +++ b/tests/foreman/cli/test_hostcollection.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from broker import Broker from fauxfactory import gen_string import pytest diff --git a/tests/foreman/cli/test_hostgroup.py b/tests/foreman/cli/test_hostgroup.py index febe94038f8..dee6662a950 100644 --- a/tests/foreman/cli/test_hostgroup.py +++ b/tests/foreman/cli/test_hostgroup.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_integer from nailgun import entities import pytest diff --git a/tests/foreman/cli/test_http_proxy.py b/tests/foreman/cli/test_http_proxy.py index 68ea95559ca..bc7e07b0cd1 100644 --- a/tests/foreman/cli/test_http_proxy.py +++ b/tests/foreman/cli/test_http_proxy.py @@ -11,6 +11,7 @@ :CaseAutomation: Automated """ + from fauxfactory import gen_integer, gen_string, gen_url import pytest diff --git a/tests/foreman/cli/test_installer.py b/tests/foreman/cli/test_installer.py index cf0eb838ee0..a31ad211237 100644 --- a/tests/foreman/cli/test_installer.py +++ b/tests/foreman/cli/test_installer.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest pytestmark = [pytest.mark.stubbed] diff --git a/tests/foreman/cli/test_jobtemplate.py b/tests/foreman/cli/test_jobtemplate.py index 66553d2c54d..8fb80120384 100644 --- a/tests/foreman/cli/test_jobtemplate.py +++ b/tests/foreman/cli/test_jobtemplate.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/cli/test_ldapauthsource.py b/tests/foreman/cli/test_ldapauthsource.py index ce7b625c3a6..f43539161bf 100644 --- a/tests/foreman/cli/test_ldapauthsource.py +++ b/tests/foreman/cli/test_ldapauthsource.py @@ -11,6 +11,7 @@ 
:CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest @@ -113,7 +114,7 @@ def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data, module_ 'attr-firstname': LDAP_ATTR['firstname'], 'attr-lastname': LDAP_ATTR['surname'], 'attr-mail': LDAP_ATTR['mail'], - 'account': fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", + 'account': rf"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", 'account-password': ad_data['ldap_user_passwd'], 'base-dn': ad_data['base_dn'], } diff --git a/tests/foreman/cli/test_leapp_client.py b/tests/foreman/cli/test_leapp_client.py index 4ea36bbf37a..ff21a28837a 100644 --- a/tests/foreman/cli/test_leapp_client.py +++ b/tests/foreman/cli/test_leapp_client.py @@ -11,7 +11,9 @@ :CaseAutomation: Automated """ + from broker import Broker +from fauxfactory import gen_string import pytest from robottelo.config import settings @@ -190,7 +192,7 @@ def custom_leapp_host(upgrade_path, module_target_sat, module_sca_manifest_org, @pytest.fixture def precondition_check_upgrade_and_install_leapp_tool(custom_leapp_host): """Clean-up directory if in-place upgrade already performed, - set rhel release version, update system and install leapp-upgrade""" + set rhel release version, update system and install leapp-upgrade and fix known inhibitors before upgrade""" source_rhel = custom_leapp_host.os_version.base_version custom_leapp_host.run('rm -rf /root/tmp_leapp_py3') custom_leapp_host.run('yum repolist') @@ -199,6 +201,20 @@ def precondition_check_upgrade_and_install_leapp_tool(custom_leapp_host): assert custom_leapp_host.run('yum install leapp-upgrade -y').status == 0 if custom_leapp_host.run('needs-restarting -r').status == 1: custom_leapp_host.power_control(state='reboot', ensure=True) + if custom_leapp_host.os_version.major == 8: + # Inhibitor - Firewalld Configuration AllowZoneDrifting Is Unsupported + assert ( + custom_leapp_host.run( + 'sed -i "s/^AllowZoneDrifting=.*/AllowZoneDrifting=no/" /etc/firewalld/firewalld.conf' + ).status + == 0 + ) + assert ( + custom_leapp_host.run( + 'echo -e "\nPermitRootLogin yes" >> /etc/ssh/sshd_config; systemctl restart sshd' + ).status + == 0 + ) @pytest.mark.parametrize( @@ -233,12 +249,6 @@ def test_leapp_upgrade_rhel( :expectedresults: 1. 
Update RHEL OS major version to another major version """ - # Fixing known inhibitors for source rhel version 8 - if custom_leapp_host.os_version.major == 8: - # Inhibitor - Firewalld Configuration AllowZoneDrifting Is Unsupported - custom_leapp_host.run( - 'sed -i "s/^AllowZoneDrifting=.*/AllowZoneDrifting=no/" /etc/firewalld/firewalld.conf' - ) # Run LEAPP-PREUPGRADE Job Template- template_id = ( module_target_sat.api.JobTemplate() @@ -285,3 +295,86 @@ def test_leapp_upgrade_rhel( custom_leapp_host.clean_cached_properties() new_ver = str(custom_leapp_host.os_version) assert new_ver == upgrade_path['target_version'] + + +@pytest.mark.parametrize( + 'upgrade_path', + [ + {'source_version': RHEL8_VER, 'target_version': RHEL9_VER}, + ], + ids=lambda upgrade_path: f'{upgrade_path["source_version"]}' + f'_to_{upgrade_path["target_version"]}', +) +def test_leapp_upgrade_rhel_non_admin( + module_target_sat, + module_sca_manifest_org, + default_location, + custom_leapp_host, + upgrade_path, + verify_target_repo_on_satellite, + precondition_check_upgrade_and_install_leapp_tool, +): + """Test to upgrade RHEL host to next major RHEL release using leapp preupgrade and leapp upgrade + job templates + + :id: afd295ca-4b0e-439f-b880-ae92c300fd9f + + :BZ: 2257302 + + :customerscenario: true + + :steps: + 1. Import a subscription manifest and enable, sync source & target repositories + 2. Create LCE, Create CV, add repositories to it, publish and promote CV, Create AK, etc. + 3. Register content host with AK + 4. Verify that target rhel repositories are enabled on Satellite + 5. Update all packages, install leapp tool and fix inhibitors + 6. Create a non-admin user with "Organization admin", "Remote Execution Manager" and "Remote Execution User" role assigned to it. + 7. Run Leapp Preupgrade and Leapp Upgrade job template from the user created in step 6. + + :expectedresults: + 1. Update RHEL OS major version to another major version from non-admin user role. 
+ """ + login = gen_string('alpha') + password = gen_string('alpha') + roles = ['Organization admin', 'Remote Execution Manager', 'Remote Execution User'] + org = module_sca_manifest_org + user = module_target_sat.cli_factory.user( + { + 'admin': False, + 'login': login, + 'password': password, + 'organization-ids': org.id, + 'location-ids': default_location.id, + } + ) + for role in roles: + module_target_sat.cli.User.add_role({'id': user['id'], 'login': login, 'role': role}) + + # Run leapp preupgrade job + invocation_command = module_target_sat.cli_factory.job_invocation_with_credentials( + { + 'job-template': 'Run preupgrade via Leapp', + 'search-query': f'name = {custom_leapp_host.hostname}', + 'organization-id': org.id, + 'location-id': default_location.id, + }, + (login, password), + ) + result = module_target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + assert result['success'] == '1' + + # Run leapp upgrade job + invocation_command = module_target_sat.cli_factory.job_invocation_with_credentials( + { + 'job-template': 'Run upgrade via Leapp', + 'search-query': f'name = {custom_leapp_host.hostname}', + 'organization-id': org.id, + 'location-id': default_location.id, + 'inputs': 'Reboot=false', + }, + (login, password), + ) + custom_leapp_host.power_control(state='reboot') + result = module_target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + assert result['success'] == '1' diff --git a/tests/foreman/cli/test_lifecycleenvironment.py b/tests/foreman/cli/test_lifecycleenvironment.py index 75bf12c681b..294753e04e8 100644 --- a/tests/foreman/cli/test_lifecycleenvironment.py +++ b/tests/foreman/cli/test_lifecycleenvironment.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from math import ceil from fauxfactory import gen_string diff --git a/tests/foreman/cli/test_location.py b/tests/foreman/cli/test_location.py index 9f835c1c581..eff9220f149 100644 --- a/tests/foreman/cli/test_location.py +++ b/tests/foreman/cli/test_location.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/cli/test_logging.py b/tests/foreman/cli/test_logging.py index 8d315b3e59c..620f2f0cceb 100644 --- a/tests/foreman/cli/test_logging.py +++ b/tests/foreman/cli/test_logging.py @@ -11,6 +11,7 @@ :CaseImportance: Medium """ + import re from fauxfactory import gen_string @@ -243,7 +244,7 @@ def test_positive_logging_from_pulp3(module_org, target_sat): target_sat.cli.Repository.synchronize({'id': repo['id']}) # Get the id of repository sync from task task_out = target_sat.execute( - "hammer task list | grep -F \'Synchronize repository {\"text\"=>\"repository\'" + "hammer task list | grep -F 'Synchronize repository {\"text\"=>\"repository'" ).stdout.splitlines()[0][:8] prod_log_out = target_sat.execute(f'grep {task_out} {source_log}').stdout.splitlines()[0] # Get correlation id of pulp from production logs diff --git a/tests/foreman/cli/test_medium.py b/tests/foreman/cli/test_medium.py index 26017b415ab..3e3b037ba2d 100644 --- a/tests/foreman/cli/test_medium.py +++ b/tests/foreman/cli/test_medium.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_alphanumeric import pytest diff --git a/tests/foreman/cli/test_model.py b/tests/foreman/cli/test_model.py index 8fc51be9f62..f60a6c59fc6 100644 --- a/tests/foreman/cli/test_model.py +++ b/tests/foreman/cli/test_model.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest @@ -37,7 +38,7 @@ def 
class_model(self, target_sat): ('name', 'new_name'), **parametrized( list(zip(valid_data_list().values(), valid_data_list().values(), strict=True)) - ) + ), ) def test_positive_crud_with_name(self, name, new_name, module_target_sat): """Successfully creates, updates and deletes a Model. diff --git a/tests/foreman/cli/test_operatingsystem.py b/tests/foreman/cli/test_operatingsystem.py index 48ed8af8099..615ffb9990f 100644 --- a/tests/foreman/cli/test_operatingsystem.py +++ b/tests/foreman/cli/test_operatingsystem.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_alphanumeric, gen_string import pytest diff --git a/tests/foreman/cli/test_organization.py b/tests/foreman/cli/test_organization.py index da79144f141..eded57e07ef 100644 --- a/tests/foreman/cli/test_organization.py +++ b/tests/foreman/cli/test_organization.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/cli/test_oscap.py b/tests/foreman/cli/test_oscap.py index c198e5fa636..ea0f15318a1 100644 --- a/tests/foreman/cli/test_oscap.py +++ b/tests/foreman/cli/test_oscap.py @@ -11,6 +11,7 @@ :CaseAutomation: Automated """ + from fauxfactory import gen_string from nailgun import entities import pytest @@ -67,8 +68,7 @@ def test_positive_list_default_content_with_admin(self, module_target_sat): :CaseImportance: Medium """ scap_contents = [content['title'] for content in module_target_sat.cli.Scapcontent.list()] - for title in OSCAP_DEFAULT_CONTENT.values(): - assert title in scap_contents + assert f'Red Hat rhel{module_target_sat.os_version.major} default content' in scap_contents @pytest.mark.tier1 def test_negative_list_default_content_with_viewer_role( @@ -295,7 +295,9 @@ def test_positive_create_scap_content_with_valid_originalfile_name( @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 def test_negative_create_scap_content_with_invalid_originalfile_name( - self, name, module_target_sat + self, + name, + module_target_sat, ): """Create scap-content with invalid original file name @@ -354,7 +356,10 @@ def test_negative_create_scap_content_without_dsfile(self, title, module_target_ module_target_sat.cli_factory.scapcontent({'title': title}) @pytest.mark.tier1 - def test_positive_update_scap_content_with_newtitle(self, module_target_sat): + def test_positive_update_scap_content_with_newtitle( + self, + module_target_sat, + ): """Update scap content title :id: 2c32e94a-237d-40b9-8a3b-fca2ef26fe79 @@ -920,7 +925,8 @@ def test_positive_update_scap_policy_with_content(self, scap_content, module_tar ) assert scap_policy['scap-content-id'] == scap_content["scap_id"] scap_id, scap_profile_id = self.fetch_scap_and_profile_id( - OSCAP_DEFAULT_CONTENT['rhel_firefox'], module_target_sat + OSCAP_DEFAULT_CONTENT[f'rhel{module_target_sat.os_version.major}_content'], + module_target_sat, ) module_target_sat.cli.Scappolicy.update( {'name': name, 'scap-content-id': scap_id, 'scap-content-profile-id': scap_profile_id} diff --git a/tests/foreman/cli/test_oscap_tailoringfiles.py b/tests/foreman/cli/test_oscap_tailoringfiles.py index e95cfb2891a..127ba051e40 100644 --- a/tests/foreman/cli/test_oscap_tailoringfiles.py +++ b/tests/foreman/cli/test_oscap_tailoringfiles.py @@ -11,6 +11,7 @@ :CaseAutomation: Automated """ + from fauxfactory import gen_string import pytest @@ -185,7 +186,6 @@ def test_negative_associate_tailoring_file_with_different_scap(self): @pytest.mark.skip_if_open("BZ:1857572") @pytest.mark.tier2 def 
test_positive_download_tailoring_file(self, tailoring_file_path, target_sat): - """Download the tailoring file from satellite :id: 75d8c810-19a7-4285-bc3a-a1fb1a0e9088 diff --git a/tests/foreman/cli/test_ostreebranch.py b/tests/foreman/cli/test_ostreebranch.py index d768b3f2d70..0d885528a04 100644 --- a/tests/foreman/cli/test_ostreebranch.py +++ b/tests/foreman/cli/test_ostreebranch.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from nailgun import entities diff --git a/tests/foreman/cli/test_partitiontable.py b/tests/foreman/cli/test_partitiontable.py index 35e52cbbe65..e89af27e163 100644 --- a/tests/foreman/cli/test_partitiontable.py +++ b/tests/foreman/cli/test_partitiontable.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import randint from fauxfactory import gen_string @@ -53,7 +54,7 @@ def test_positive_create_with_one_character_name(self, name, target_sat): strict=True, ) ) - ) + ), ) def test_positive_crud_with_name(self, name, new_name, module_target_sat): """Create, read, update and delete Partition Tables with different names @@ -86,23 +87,9 @@ def test_positive_create_with_content(self, module_target_sat): :CaseImportance: Critical """ content = 'Fake ptable' - ptable = module_target_sat.cli_factory.make_partition_table({'content': content}) - ptable_content = module_target_sat.cli.PartitionTable().dump({'id': ptable['id']}) - assert content in ptable_content - - @pytest.mark.tier1 - @pytest.mark.upgrade - def test_positive_create_with_content_length(self, module_target_sat): - """Create a Partition Table with content length more than 4096 chars - - :id: 59e6f9ef-85c2-4229-8831-00edb41b19f4 - - :expectedresults: Partition Table is created and has correct content - - :BZ: 1270181 - """ - content = gen_string('alpha', 5000) - ptable = module_target_sat.cli_factory.make_partition_table({'content': content}) + filename = gen_string('alpha', 10) + module_target_sat.execute(f'echo {content} > {filename}') + ptable = module_target_sat.cli_factory.make_partition_table({'file': filename}) ptable_content = module_target_sat.cli.PartitionTable().dump({'id': ptable['id']}) assert content in ptable_content diff --git a/tests/foreman/cli/test_ping.py b/tests/foreman/cli/test_ping.py index 0a288143233..1a1471d5d2f 100644 --- a/tests/foreman/cli/test_ping.py +++ b/tests/foreman/cli/test_ping.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest pytestmark = [pytest.mark.tier1, pytest.mark.upgrade] diff --git a/tests/foreman/cli/test_product.py b/tests/foreman/cli/test_product.py index 3d413209db3..ec81348a850 100644 --- a/tests/foreman/cli/test_product.py +++ b/tests/foreman/cli/test_product.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_alphanumeric, gen_integer, gen_string, gen_url import pytest diff --git a/tests/foreman/cli/test_provisioning.py b/tests/foreman/cli/test_provisioning.py index 1c7799edc53..21f937dac24 100644 --- a/tests/foreman/cli/test_provisioning.py +++ b/tests/foreman/cli/test_provisioning.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest diff --git a/tests/foreman/cli/test_provisioningtemplate.py b/tests/foreman/cli/test_provisioningtemplate.py index 59aabc2d2c4..a27b6fe534d 100644 --- a/tests/foreman/cli/test_provisioningtemplate.py +++ b/tests/foreman/cli/test_provisioningtemplate.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from random import randint diff --git a/tests/foreman/cli/test_puppetclass.py b/tests/foreman/cli/test_puppetclass.py index 
3a8b50856bb..a3dcd8837f9 100644 --- a/tests/foreman/cli/test_puppetclass.py +++ b/tests/foreman/cli/test_puppetclass.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/cli/test_realm.py b/tests/foreman/cli/test_realm.py index c05dc1291f8..4cae5e847b9 100644 --- a/tests/foreman/cli/test_realm.py +++ b/tests/foreman/cli/test_realm.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_string diff --git a/tests/foreman/cli/test_registration.py b/tests/foreman/cli/test_registration.py index 89d560ab862..2eadf114110 100644 --- a/tests/foreman/cli/test_registration.py +++ b/tests/foreman/cli/test_registration.py @@ -11,6 +11,10 @@ :Team: Rocket """ + +import re + +from fauxfactory import gen_string import pytest from robottelo.config import settings @@ -167,6 +171,46 @@ def test_negative_register_twice(module_ak_with_cv, module_org, rhel_contenthost assert 'This system is already registered' in str(result.stderr) +@pytest.mark.rhel_ver_match('[^6]') +@pytest.mark.tier3 +def test_positive_force_register_twice(module_ak_with_cv, module_org, rhel_contenthost, target_sat): + """Register a host twice to Satellite, with force=true + + :id: 7ccd4efd-54bb-4207-9acf-4c6243a32fab + + :expectedresults: Host will be re-registered + + :parametrized: yes + + :BZ: 1361309 + + :customerscenario: true + """ + reg_id_pattern = r"The system has been registered with ID: ([^\n]*)" + name = gen_string('alpha') + ".example.com" + rhel_contenthost.execute(f'hostnamectl set-hostname {name}') + result = rhel_contenthost.register(module_org, None, module_ak_with_cv.name, target_sat) + reg_id_old = re.search(reg_id_pattern, result.stdout).group(1) + assert result.status == 0 + assert rhel_contenthost.subscribed + result = rhel_contenthost.register( + module_org, None, module_ak_with_cv.name, target_sat, force=True + ) + assert result.status == 0 + assert rhel_contenthost.subscribed + assert f'Unregistering from: {target_sat.hostname}' in str(result.stdout) + assert f'The registered system name is: {rhel_contenthost.hostname}' in str(result.stdout) + reg_id_new = re.search(reg_id_pattern, result.stdout).group(1) + assert f'The system has been registered with ID: {reg_id_new}' in str(result.stdout) + assert reg_id_new != reg_id_old + assert ( + target_sat.cli.Host.info({'name': rhel_contenthost.hostname}, output_format='json')[ + 'subscription-information' + ]['uuid'] + == reg_id_new + ) + + @pytest.mark.tier1 def test_negative_global_registration_without_ak(module_target_sat): """Attempt to register a host without ActivationKey diff --git a/tests/foreman/cli/test_remoteexecution.py b/tests/foreman/cli/test_remoteexecution.py index 1b3bca14dc0..f8cfcf3c0cb 100644 --- a/tests/foreman/cli/test_remoteexecution.py +++ b/tests/foreman/cli/test_remoteexecution.py @@ -11,11 +11,11 @@ :CaseImportance: High """ + from calendar import monthrange from datetime import datetime, timedelta from time import sleep -from broker import Broker from dateutil.relativedelta import FR, relativedelta from fauxfactory import gen_string import pytest @@ -23,29 +23,9 @@ from robottelo.cli.host import Host from robottelo.config import settings -from robottelo.constants import PRDS, REPOS, REPOSET -from robottelo.hosts import ContentHost from robottelo.utils import ohsnap -@pytest.fixture -def fixture_sca_vmsetup(request, module_sca_manifest_org, target_sat): - """Create VM and register content host to Simple Content Access organization""" - 
if '_count' in request.param: - with Broker( - nick=request.param['nick'], - host_class=ContentHost, - _count=request.param['_count'], - ) as clients: - for client in clients: - client.configure_rex(satellite=target_sat, org=module_sca_manifest_org) - yield clients - else: - with Broker(nick=request.param['nick'], host_class=ContentHost) as client: - client.configure_rex(satellite=target_sat, org=module_sca_manifest_org) - yield client - - @pytest.fixture def infra_host(request, target_sat, module_capsule_configured): infra_hosts = {'target_sat': target_sat, 'module_capsule_configured': module_capsule_configured} @@ -214,10 +194,9 @@ def test_positive_run_custom_job_template_by_ip(self, rex_contenthost, module_or @pytest.mark.tier3 @pytest.mark.upgrade - @pytest.mark.no_containers @pytest.mark.rhel_ver_list([8]) def test_positive_run_default_job_template_multiple_hosts_by_ip( - self, registered_hosts, module_target_sat + self, rex_contenthosts, module_target_sat ): """Run default job template against multiple hosts by ip @@ -227,7 +206,7 @@ def test_positive_run_default_job_template_multiple_hosts_by_ip( :parametrized: yes """ - clients = registered_hosts + clients = rex_contenthosts invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', @@ -519,402 +498,21 @@ def test_recurring_with_unreachable_host(self, module_target_sat, rhel_contentho assert cli.JobInvocation.info({'id': invocation.id})['failed'] != '0' -class TestAnsibleREX: - """Test class for remote execution via Ansible""" - - @pytest.mark.tier3 - @pytest.mark.upgrade - @pytest.mark.pit_client - @pytest.mark.pit_server - @pytest.mark.rhel_ver_list([7, 8, 9]) - def test_positive_run_effective_user_job(self, rex_contenthost, target_sat): - """Tests Ansible REX job having effective user runs successfully - - :id: a5fa20d8-c2bd-4bbf-a6dc-bf307b59dd8c - - :steps: - - 0. Create a VM and register to SAT and prepare for REX (ssh key) - - 1. Run Ansible Command job for the host to create a user - - 2. Run Ansible Command job using effective user - - 3. 
Check the job result at the host is done under that user - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :parametrized: yes - """ - client = rex_contenthost - # create a user on client via remote job - username = gen_string('alpha') - filename = gen_string('alpha') - make_user_job = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Run Command - Ansible Default', - 'inputs': f"command=useradd -m {username}", - 'search-query': f"name ~ {client.hostname}", - } - ) - assert_job_invocation_result(target_sat, make_user_job['id'], client.hostname) - # create a file as new user - invocation_command = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Run Command - Ansible Default', - 'inputs': f"command=touch /home/{username}/{filename}", - 'search-query': f"name ~ {client.hostname}", - 'effective-user': f'{username}', - } - ) - assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) - # check the file owner - result = client.execute( - f'''stat -c '%U' /home/{username}/{filename}''', - ) - # assert the file is owned by the effective user - assert username == result.stdout.strip('\n'), "file ownership mismatch" - - @pytest.mark.tier3 - @pytest.mark.upgrade - @pytest.mark.rhel_ver_list([8]) - def test_positive_run_reccuring_job(self, rex_contenthost, target_sat): - """Tests Ansible REX reccuring job runs successfully multiple times - - :id: 49b0d31d-58f9-47f1-aa5d-561a1dcb0d66 - - :steps: - - 0. Create a VM and register to SAT and prepare for REX (ssh key) - - 1. Run recurring Ansible Command job for the host - - 2. Check the multiple job results at the host - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :customerscenario: true - - :bz: 2129432 - - :parametrized: yes - """ - client = rex_contenthost - invocation_command = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Run Command - Ansible Default', - 'inputs': 'command=ls', - 'search-query': f"name ~ {client.hostname}", - 'cron-line': '* * * * *', # every minute - 'max-iteration': 2, # just two runs - } - ) - result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) - sleep(150) - rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) - assert rec_logic['state'] == 'finished' - assert rec_logic['iteration'] == '2' - # 2129432 - rec_logic_keys = rec_logic.keys() - assert 'action' in rec_logic_keys - assert 'last-occurrence' in rec_logic_keys - assert 'next-occurrence' in rec_logic_keys - assert 'state' in rec_logic_keys - assert 'purpose' in rec_logic_keys - assert 'iteration' in rec_logic_keys - assert 'iteration-limit' in rec_logic_keys - - @pytest.mark.tier3 - @pytest.mark.no_containers - def test_positive_run_concurrent_jobs(self, registered_hosts, target_sat): - """Tests Ansible REX concurent jobs without batch trigger - - :id: ad0f108c-03f2-49c7-8732-b1056570567b - - :steps: - - 0. Create 2 hosts, disable foreman_tasks_proxy_batch_trigger - - 1. 
Run Ansible Command job with concurrency-setting - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :customerscenario: true - - :BZ: 1817320 - - :parametrized: yes - """ - param_name = 'foreman_tasks_proxy_batch_trigger' - target_sat.cli.GlobalParameter().set({'name': param_name, 'value': 'false'}) - clients = registered_hosts - output_msgs = [] - invocation_command = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Run Command - Ansible Default', - 'inputs': 'command=ls', - 'search-query': f'name ~ {clients[0].hostname} or name ~ {clients[1].hostname}', - 'concurrency-level': 2, - } - ) - for vm in clients: - output_msgs.append( - 'host output from {}: {}'.format( - vm.hostname, - ' '.join( - target_sat.cli.JobInvocation.get_output( - {'id': invocation_command['id'], 'host': vm.hostname} - ) - ), - ) - ) - result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) - assert result['success'] == '2', output_msgs - target_sat.cli.GlobalParameter().delete({'name': param_name}) - assert len(target_sat.cli.GlobalParameter().list({'search': param_name})) == 0 - - @pytest.mark.tier3 - @pytest.mark.no_containers - def test_positive_run_serial(self, registered_hosts, target_sat): - """Tests subtasks in a job run one by one when concurrency level set to 1 - - :id: 5ce39447-82d0-42df-81be-16ed3d67a2a4 - - :Setup: - 0. Create 2 hosts - - :steps: - - 0. Run a bash command job with concurrency level 1 - - :expectedresults: First subtask should run immediately, second one after the first one finishes - - :CaseAutomation: Automated - - :parametrized: yes - """ - hosts = registered_hosts - output_msgs = [] - template_file = f"/root/{gen_string('alpha')}.template" - target_sat.execute( - f"echo 'rm /root/test-<%= @host %>; echo $(date +%s) >> /root/test-<%= @host %>; sleep 120; echo $(date +%s) >> /root/test-<%= @host %>' > {template_file}" - ) - template = target_sat.cli.JobTemplate.create( - { - 'name': gen_string('alpha'), - 'file': template_file, - 'job-category': 'Commands', - 'provider-type': 'script', - } - ) - invocation = target_sat.cli_factory.job_invocation( - { - 'job-template': template['name'], - 'search-query': f'name ~ {hosts[0].hostname} or name ~ {hosts[1].hostname}', - 'concurrency-level': 1, - } - ) - for vm in hosts: - output_msgs.append( - 'host output from {}: {}'.format( - vm.hostname, - ' '.join( - target_sat.cli.JobInvocation.get_output( - {'id': invocation['id'], 'host': vm.hostname} - ) - ), - ) - ) - result = target_sat.cli.JobInvocation.info({'id': invocation['id']}) - assert result['success'] == '2', output_msgs - # assert for time diffs - file1 = hosts[0].execute('cat /root/test-$(hostname)').stdout - file2 = hosts[1].execute('cat /root/test-$(hostname)').stdout - file1_start, file1_end = map(int, file1.rstrip().split('\n')) - file2_start, file2_end = map(int, file2.rstrip().split('\n')) - if file1_start > file2_start: - file1_start, file1_end, file2_start, file2_end = ( - file2_start, - file2_end, - file1_start, - file1_end, - ) - assert file1_end - file1_start >= 120 - assert file2_end - file2_start >= 120 - assert file2_start >= file1_end # the jobs did NOT run concurrently - - @pytest.mark.tier3 - @pytest.mark.upgrade - @pytest.mark.e2e - @pytest.mark.no_containers - @pytest.mark.pit_server - @pytest.mark.rhel_ver_match('[^6].*') - @pytest.mark.skipif( - (not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url' - ) - def test_positive_run_packages_and_services_job( - self, 
rhel_contenthost, module_org, module_ak_with_cv, target_sat - ): - """Tests Ansible REX job can install packages and start services - - :id: 47ed82fb-77ca-43d6-a52e-f62bae5d3a42 - - :steps: - - 0. Create a VM and register to SAT and prepare for REX (ssh key) - - 1. Run Ansible Package job for the host to install a package - - 2. Check the package is present at the host - - 3. Run Ansible Service job for the host to start a service - - 4. Check the service is started on the host - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :bz: 1872688, 1811166 - - :CaseImportance: Critical - - :customerscenario: true - - :parametrized: yes - """ - client = rhel_contenthost - packages = ['tapir'] - client.register( - module_org, - None, - module_ak_with_cv.name, - target_sat, - repo=settings.repos.yum_3.url, - ) - # install package - invocation_command = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Package Action - Ansible Default', - 'inputs': 'state=latest, name={}'.format(*packages), - 'search-query': f'name ~ {client.hostname}', - } - ) - assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) - result = client.run(f'rpm -q {" ".join(packages)}') - assert result.status == 0 - - # stop a service - service = "rsyslog" - invocation_command = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Service Action - Ansible Default', - 'inputs': f'state=stopped, name={service}', - 'search-query': f"name ~ {client.hostname}", - } - ) - assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) - result = client.execute(f'systemctl status {service}') - assert result.status == 3 - - # start it again - invocation_command = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Service Action - Ansible Default', - 'inputs': f'state=started, name={service}', - 'search-query': f'name ~ {client.hostname}', - } - ) - assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) - result = client.execute(f'systemctl status {service}') - assert result.status == 0 - - @pytest.mark.tier3 - @pytest.mark.parametrize( - 'fixture_sca_vmsetup', [{'nick': 'rhel8'}], ids=['rhel8'], indirect=True - ) - def test_positive_install_ansible_collection( - self, fixture_sca_vmsetup, module_sca_manifest_org, target_sat - ): - """Test whether Ansible collection can be installed via REX - - :steps: - 1. Upload a manifest. - 2. Enable and sync Ansible repository. - 3. Register content host to Satellite. - 4. Enable Ansible repo on content host. - 5. Install ansible package. - 6. Run REX job to install Ansible collection on content host. 
- - :id: ad25aee5-4ea3-4743-a301-1c6271856f79 - - :CaseComponent: Ansible-RemoteExecution - - :Team: Rocket - """ - # Configure repository to prepare for installing ansible on host - target_sat.cli.RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': REPOSET['rhae2.9_el8'], - 'organization-id': module_sca_manifest_org.id, - 'product': PRDS['rhae'], - 'releasever': '8', - } - ) - target_sat.cli.Repository.synchronize( - { - 'name': REPOS['rhae2.9_el8']['name'], - 'organization-id': module_sca_manifest_org.id, - 'product': PRDS['rhae'], - } - ) - client = fixture_sca_vmsetup - client.execute('subscription-manager refresh') - client.execute(f'subscription-manager repos --enable {REPOS["rhae2.9_el8"]["id"]}') - client.execute('dnf -y install ansible') - collection_job = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Ansible Collection - Install from Galaxy', - 'inputs': 'ansible_collections_list="oasis_roles.system"', - 'search-query': f'name ~ {client.hostname}', - } - ) - result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) - assert result['success'] == '1' - collection_path = client.execute('ls /etc/ansible/collections/ansible_collections').stdout - assert 'oasis_roles' in collection_path - - # Extend test with custom collections_path advanced input field - collection_job = target_sat.cli_factory.job_invocation( - { - 'job-template': 'Ansible Collection - Install from Galaxy', - 'inputs': 'ansible_collections_list="oasis_roles.system", collections_path="~/"', - 'search-query': f'name ~ {client.hostname}', - } - ) - result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) - assert result['success'] == '1' - collection_path = client.execute('ls ~/ansible_collections').stdout - assert 'oasis_roles' in collection_path - - class TestRexUsers: """Tests related to remote execution users""" @pytest.fixture(scope='class') - def class_rexmanager_user(self, module_org, class_target_sat): + def class_rexmanager_user(self, module_org, default_location, class_target_sat): """Creates a user with Remote Execution Manager role""" password = gen_string('alpha') rexmanager = gen_string('alpha') class_target_sat.cli_factory.user( - {'login': rexmanager, 'password': password, 'organization-ids': module_org.id} + { + 'login': rexmanager, + 'password': password, + 'organization-ids': module_org.id, + 'location-ids': default_location.id, + } ) class_target_sat.cli.User.add_role( {'login': rexmanager, 'role': 'Remote Execution Manager'} @@ -922,12 +520,17 @@ def class_rexmanager_user(self, module_org, class_target_sat): return (rexmanager, password) @pytest.fixture(scope='class') - def class_rexinfra_user(self, module_org, class_target_sat): + def class_rexinfra_user(self, module_org, default_location, class_target_sat): """Creates a user with all Remote Execution related permissions""" password = gen_string('alpha') rexinfra = gen_string('alpha') class_target_sat.cli_factory.user( - {'login': rexinfra, 'password': password, 'organization-ids': module_org.id} + { + 'login': rexinfra, + 'password': password, + 'organization-ids': module_org.id, + 'location-ids': default_location.id, + } ) role = class_target_sat.cli_factory.make_role({'organization-ids': module_org.id}) invocation_permissions = [ diff --git a/tests/foreman/cli/test_report.py b/tests/foreman/cli/test_report.py index ea4f14371bb..588346f9431 100644 --- a/tests/foreman/cli/test_report.py +++ b/tests/foreman/cli/test_report.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import random import 
pytest diff --git a/tests/foreman/cli/test_reporttemplates.py b/tests/foreman/cli/test_reporttemplates.py index a27123201f1..5f35f28af76 100644 --- a/tests/foreman/cli/test_reporttemplates.py +++ b/tests/foreman/cli/test_reporttemplates.py @@ -10,6 +10,7 @@ :CaseImportance: High """ + from broker import Broker from fauxfactory import gen_alpha import pytest diff --git a/tests/foreman/cli/test_repositories.py b/tests/foreman/cli/test_repositories.py index cf6e10db5c0..f7f4fdafdb9 100644 --- a/tests/foreman/cli/test_repositories.py +++ b/tests/foreman/cli/test_repositories.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest from requests.exceptions import HTTPError @@ -116,7 +117,7 @@ def test_positive_disable_rh_repo_with_basearch(module_target_sat, module_entitl disabled_repo = module_target_sat.cli.RepositorySet.disable( { 'basearch': DEFAULT_ARCHITECTURE, - 'name': REPOSET['kickstart']['rhel8'], + 'name': REPOSET['kickstart']['rhel8_bos'], 'product-id': repo.product.id, 'organization-id': module_entitlement_manifest_org.id, 'releasever': REPOS['kickstart']['rhel8_aps']['version'], diff --git a/tests/foreman/cli/test_repository.py b/tests/foreman/cli/test_repository.py index 466ffbb8319..364315ba197 100644 --- a/tests/foreman/cli/test_repository.py +++ b/tests/foreman/cli/test_repository.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import choice from string import punctuation @@ -41,7 +42,6 @@ CUSTOM_FILE_REPO, CUSTOM_RPM_SHA, FAKE_5_YUM_REPO, - FAKE_YUM_DRPM_REPO, FAKE_YUM_MD5_REPO, FAKE_YUM_SRPM_REPO, ) @@ -2010,8 +2010,7 @@ def test_positive_accessible_content_status( :CaseImportance: Critical """ - rhel7_contenthost.install_katello_ca(target_sat) - rhel7_contenthost.register_contenthost(module_org.label, module_ak_with_synced_repo['name']) + rhel7_contenthost.register(module_org, None, module_ak_with_synced_repo['name'], target_sat) assert rhel7_contenthost.subscribed rhel7_contenthost.run('yum repolist') access_log = target_sat.execute( @@ -2022,7 +2021,7 @@ def test_positive_accessible_content_status( @pytest.mark.tier2 @pytest.mark.parametrize( 'repo_options', - **parametrized([{'content_type': 'yum', 'url': CUSTOM_RPM_SHA}]), + **parametrized([{'content-type': 'yum', 'url': CUSTOM_RPM_SHA}]), indirect=True, ) def test_positive_sync_sha_repo(self, repo_options, module_target_sat): @@ -2047,7 +2046,7 @@ def test_positive_sync_sha_repo(self, repo_options, module_target_sat): @pytest.mark.tier2 @pytest.mark.parametrize( 'repo_options', - **parametrized([{'content_type': 'yum', 'url': CUSTOM_3RD_PARTY_REPO}]), + **parametrized([{'content-type': 'yum', 'url': CUSTOM_3RD_PARTY_REPO}]), indirect=True, ) def test_positive_sync_third_party_repo(self, repo_options, module_target_sat): @@ -2532,93 +2531,6 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, target_sat): assert lce['id'] in [lc['id'] for lc in cv['lifecycle-environments']] -@pytest.mark.skip_if_open("BZ:1682951") -class TestDRPMRepository: - """Tests specific to using repositories containing delta RPMs.""" - - @pytest.mark.tier2 - @pytest.mark.skip("Uses deprecated DRPM repository") - @pytest.mark.parametrize( - 'repo_options', **parametrized([{'url': FAKE_YUM_DRPM_REPO}]), indirect=True - ) - def test_positive_sync(self, repo, module_org, module_product, target_sat): - """Synchronize repository with DRPMs - - :id: a645966c-750b-40ef-a264-dc3bb632b9fd - - :parametrized: yes - - :expectedresults: drpms can be listed in repository - """ - target_sat.cli.Repository.synchronize({'id': 
repo['id']}) - result = target_sat.execute( - f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/Library" - f"/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm" - ) - assert result.status == 0 - assert result.stdout - - @pytest.mark.tier2 - @pytest.mark.skip("Uses deprecated DRPM repository") - @pytest.mark.parametrize( - 'repo_options', **parametrized([{'url': FAKE_YUM_DRPM_REPO}]), indirect=True - ) - def test_positive_sync_publish_cv(self, repo, module_org, module_product, target_sat): - """Synchronize repository with DRPMs, add repository to content view - and publish content view - - :id: 014bfc80-4622-422e-a0ec-755b1d9f845e - - :parametrized: yes - - :expectedresults: drpms can be listed in content view - """ - target_sat.cli.Repository.synchronize({'id': repo['id']}) - cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) - target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) - target_sat.cli.ContentView.publish({'id': cv['id']}) - result = target_sat.execute( - f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/content_views/" - f"{cv['label']}/1.0/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm" - ) - assert result.status == 0 - assert result.stdout - - @pytest.mark.tier2 - @pytest.mark.upgrade - @pytest.mark.skip("Uses deprecated DRPM repository") - @pytest.mark.parametrize( - 'repo_options', **parametrized([{'url': FAKE_YUM_DRPM_REPO}]), indirect=True - ) - def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product, target_sat): - """Synchronize repository with DRPMs, add repository to content view, - publish and promote content view to lifecycle environment - - :id: a01cb12b-d388-4902-8532-714f4e28ec56 - - :parametrized: yes - - :expectedresults: drpms can be listed in content view in proper - lifecycle environment - """ - lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - target_sat.cli.Repository.synchronize({'id': repo['id']}) - cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) - target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) - target_sat.cli.ContentView.publish({'id': cv['id']}) - content_view = target_sat.cli.ContentView.info({'id': cv['id']}) - cvv = content_view['versions'][0] - target_sat.cli.ContentView.version_promote( - {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']} - ) - result = target_sat.execute( - f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/{lce['label']}" - f"/{cv['label']}/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm" - ) - assert result.status == 0 - assert result.stdout - - class TestFileRepository: """Specific tests for File Repositories""" diff --git a/tests/foreman/cli/test_repository_set.py b/tests/foreman/cli/test_repository_set.py index 092034c6dc8..71d7d091493 100644 --- a/tests/foreman/cli/test_repository_set.py +++ b/tests/foreman/cli/test_repository_set.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.constants import PRDS, REPOSET diff --git a/tests/foreman/cli/test_rhcloud_inventory.py b/tests/foreman/cli/test_rhcloud_inventory.py index 31847984e03..d8fbb53adbf 100644 --- a/tests/foreman/cli/test_rhcloud_inventory.py +++ b/tests/foreman/cli/test_rhcloud_inventory.py @@ -6,11 +6,12 @@ :CaseComponent: RHCloud -:Team: Platform +:Team: Phoenix-subscriptions :CaseImportance: High """ + from datetime import 
datetime import time diff --git a/tests/foreman/cli/test_role.py b/tests/foreman/cli/test_role.py index 7ccdafdf4f1..6e16e0f0759 100644 --- a/tests/foreman/cli/test_role.py +++ b/tests/foreman/cli/test_role.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from math import ceil from random import choice import re diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index d4b2b883c44..efc2b6045e9 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import os from time import sleep diff --git a/tests/foreman/cli/test_settings.py b/tests/foreman/cli/test_settings.py index fdd005e02e8..3c8f37461cb 100644 --- a/tests/foreman/cli/test_settings.py +++ b/tests/foreman/cli/test_settings.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from time import sleep diff --git a/tests/foreman/cli/test_sso.py b/tests/foreman/cli/test_sso.py index 949e021ff48..e24b847ed73 100644 --- a/tests/foreman/cli/test_sso.py +++ b/tests/foreman/cli/test_sso.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest pytestmark = [pytest.mark.stubbed, pytest.mark.upgrade] diff --git a/tests/foreman/cli/test_subnet.py b/tests/foreman/cli/test_subnet.py index 74a6e1727fc..c7daca3bb27 100644 --- a/tests/foreman/cli/test_subnet.py +++ b/tests/foreman/cli/test_subnet.py @@ -11,6 +11,7 @@ :CaseImportance: Medium """ + import random import re diff --git a/tests/foreman/cli/test_subscription.py b/tests/foreman/cli/test_subscription.py index a7a74f4360c..bb48c6148f7 100644 --- a/tests/foreman/cli/test_subscription.py +++ b/tests/foreman/cli/test_subscription.py @@ -11,11 +11,14 @@ :CaseImportance: High """ + from fauxfactory import gen_string +from manifester import Manifester from nailgun import entities import pytest -from robottelo.constants import PRDS, REPOS, REPOSET +from robottelo.config import settings +from robottelo.constants import EXPIRED_MANIFEST, PRDS, REPOS, REPOSET, DataFile from robottelo.exceptions import CLIReturnCodeError pytestmark = [pytest.mark.run_in_one_thread] @@ -276,3 +279,44 @@ def test_positive_auto_attach_disabled_golden_ticket( with pytest.raises(CLIReturnCodeError) as context: target_sat.cli.Host.subscription_auto_attach({'host-id': host_id}) assert "This host's organization is in Simple Content Access mode" in str(context.value) + + +def test_negative_check_katello_reimport(target_sat, function_org): + """Verify that foreman-rake katello:reimport --trace does not fail with a TypeError + + :id: b7508a1c-7798-4649-83a3-cf94c7409c96 + + :steps: + 1. Import expired manifest & refresh + 2. Delete expired manifest + 3. Re-import new valid manifest & refresh + + :expectedresults: There should not be an error after re-importing the manifest + + :customerscenario: true + + :BZ: 2225534, 2253621 + """ + remote_path = f'/tmp/{EXPIRED_MANIFEST}' + target_sat.put(DataFile.EXPIRED_MANIFEST_FILE, remote_path) + # Import expired manifest & refresh + target_sat.cli.Subscription.upload({'organization-id': function_org.id, 'file': remote_path}) + with pytest.raises(CLIReturnCodeError): + target_sat.cli.Subscription.refresh_manifest({'organization-id': function_org.id}) + exec_val = target_sat.execute( + 'grep -i "Katello::HttpErrors::BadRequest: This Organization\'s subscription ' + 'manifest has expired. 
Please import a new manifest" /var/log/foreman/production.log' + ) + assert exec_val.status + # Delete expired manifest + target_sat.cli.Subscription.delete_manifest({'organization-id': function_org.id}) + # Re-import new manifest & refresh + manifester = Manifester(manifest_category=settings.manifest.golden_ticket) + manifest = manifester.get_manifest() + target_sat.upload_manifest(function_org.id, manifest.content) + ret_val = target_sat.cli.Subscription.refresh_manifest({'organization-id': function_org.id}) + assert 'Candlepin job status: SUCCESS' in ret_val + # Additional check, katello:reimport trace should not fail with TypeError + trace_output = target_sat.execute("foreman-rake katello:reimport --trace") + assert 'TypeError: no implicit conversion of String into Integer' not in trace_output.stdout + assert trace_output.status == 0 diff --git a/tests/foreman/cli/test_syncplan.py b/tests/foreman/cli/test_syncplan.py index 8669cd49a18..4569fd5aa61 100644 --- a/tests/foreman/cli/test_syncplan.py +++ b/tests/foreman/cli/test_syncplan.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import datetime, timedelta from time import sleep diff --git a/tests/foreman/cli/test_templatesync.py b/tests/foreman/cli/test_templatesync.py index b0cc31b2003..4d8aa850373 100644 --- a/tests/foreman/cli/test_templatesync.py +++ b/tests/foreman/cli/test_templatesync.py @@ -9,6 +9,7 @@ :Team: Endeavour """ + import base64 from fauxfactory import gen_string diff --git a/tests/foreman/cli/test_user.py b/tests/foreman/cli/test_user.py index 3646e301db0..8b9c2b97147 100644 --- a/tests/foreman/cli/test_user.py +++ b/tests/foreman/cli/test_user.py @@ -17,6 +17,7 @@ :CaseImportance: High """ + import datetime import random from time import sleep diff --git a/tests/foreman/cli/test_usergroup.py b/tests/foreman/cli/test_usergroup.py index 1c757b4e766..5ad9106a6fc 100644 --- a/tests/foreman/cli/test_usergroup.py +++ b/tests/foreman/cli/test_usergroup.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random import pytest diff --git a/tests/foreman/cli/test_vm_install_products_package.py b/tests/foreman/cli/test_vm_install_products_package.py index f03fc19e997..e64f98aaaf8 100644 --- a/tests/foreman/cli/test_vm_install_products_package.py +++ b/tests/foreman/cli/test_vm_install_products_package.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from broker import Broker import pytest diff --git a/tests/foreman/cli/test_webhook.py b/tests/foreman/cli/test_webhook.py index 2849b6aa57d..b981f01f8b1 100644 --- a/tests/foreman/cli/test_webhook.py +++ b/tests/foreman/cli/test_webhook.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from functools import partial from random import choice diff --git a/tests/foreman/conftest.py b/tests/foreman/conftest.py index 339eb6016f9..1be6ba294e6 100644 --- a/tests/foreman/conftest.py +++ b/tests/foreman/conftest.py @@ -32,6 +32,8 @@ def pytest_collection_modifyitems(session, items, config): for item in items: if any("manifest" in f for f in getattr(item, "fixturenames", ())): item.add_marker("manifester") + if any("ldap" in f for f in getattr(item, "fixturenames", ())): + item.add_marker("ldap") # 1. Deselect tests marked with @pytest.mark.deselect # WONTFIX BZs makes test to be dynamically marked as deselect. 
deselect = item.get_closest_marker('deselect') diff --git a/tests/foreman/data/expired-manifest.zip b/tests/foreman/data/expired-manifest.zip new file mode 100644 index 00000000000..8737ba42db2 Binary files /dev/null and b/tests/foreman/data/expired-manifest.zip differ diff --git a/tests/foreman/destructive/test_ansible.py b/tests/foreman/destructive/test_ansible.py index 01779d6a14a..f48b7e996b0 100644 --- a/tests/foreman/destructive/test_ansible.py +++ b/tests/foreman/destructive/test_ansible.py @@ -11,9 +11,11 @@ :CaseImportance: High """ + +from fauxfactory import gen_string import pytest -pytestmark = pytest.mark.destructive +pytestmark = [pytest.mark.destructive, pytest.mark.upgrade] def test_positive_persistent_ansible_cfg_change(target_sat): @@ -68,3 +70,50 @@ def test_positive_import_all_roles(target_sat): # by default should work here. session.ansibleroles.delete('theforeman.foreman_scap_client') assert not session.ansibleroles.search('theforeman.foreman_scap_client') + + +@pytest.mark.parametrize('setting_update', ['entries_per_page=12'], indirect=True) +def test_positive_hostgroup_ansible_roles_tab_pagination(target_sat, setting_update): + """Import all Ansible roles and verify the Ansible Roles tab pagination on the Hostgroup create and edit pages honors the entries_per_page setting. + + :id: 53fe3857-a08f-493d-93c7-3fed331ed392 + + :steps: + 1. Navigate to the Configure > Roles page, and click the `Import from [hostname]` button + 2. Get total number of importable roles from pagination. + 3. Fill the `Select All` checkbox and click the `Submit` button + 4. Verify that number of imported roles == number of importable roles from step 2 + 5. Navigate to Administer > Settings > General tab and update the entries_per_page setting + 6. Navigate to `Ansible Roles` tab in Hostgroup create and edit page + 7. Verify the new per page entry is updated in pagination list + + :expectedresults: All imported roles should be available on the webUI and properly paginated + as per entries_per_page setting on create and edit hostgroup page. 
+ + :BZ: 2166466, 2242915 + + :customerscenario: true + """ + setting_value = str( + target_sat.api.Setting().search(query={'search': 'name=entries_per_page'})[0].value + ) + with target_sat.ui_session() as session: + imported_roles = session.ansibleroles.import_all_roles() + total_role_count = str(session.ansibleroles.imported_roles_count) + assert imported_roles == int(total_role_count) + assert total_role_count > setting_value + + create_page = session.hostgroup.helper.read_filled_view( + 'New', read_widget_names=['ansible_roles.pagination'] + ) + assert create_page['ansible_roles']['pagination']['_items'].split()[2] == setting_value + assert create_page['ansible_roles']['pagination']['_items'].split()[-2] == total_role_count + + hg = target_sat.api.HostGroup(name=gen_string('alpha')).create() + edit_page = session.hostgroup.helper.read_filled_view( + 'Edit', + navigation_kwargs={'entity_name': hg.name}, + read_widget_names=['ansible_roles.pagination'], + ) + assert edit_page['ansible_roles']['pagination']['_items'].split()[2] == setting_value + assert edit_page['ansible_roles']['pagination']['_items'].split()[-2] == total_role_count diff --git a/tests/foreman/destructive/test_auth.py b/tests/foreman/destructive/test_auth.py index d97c592d05b..e1c50a50e08 100644 --- a/tests/foreman/destructive/test_auth.py +++ b/tests/foreman/destructive/test_auth.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from fauxfactory import gen_string import pytest @@ -18,8 +19,6 @@ from robottelo.constants import HAMMER_CONFIG LOGEDIN_MSG = "Session exists, currently logged in as '{0}'" -LOGEDOFF_MSG = "Using sessions, you are currently not logged in" -NOTCONF_MSG = "Credentials are not configured." password = gen_string('alpha') pytestmark = pytest.mark.destructive @@ -36,7 +35,7 @@ def test_positive_password_reset(target_sat): """ result = target_sat.execute('foreman-rake permissions:reset') assert result.status == 0 - reset_password = result.stdout.splitlines()[0].split('password: ')[1] + reset_password = result.stdout.splitlines()[1].split('password: ')[1] result = target_sat.execute( f'''sed -i -e '/username/d;/password/d;/use_sessions/d' {HAMMER_CONFIG};\ echo ' :use_sessions: true' >> {HAMMER_CONFIG}''' @@ -46,5 +45,5 @@ def test_positive_password_reset(target_sat): {'username': settings.server.admin_username, 'password': reset_password} ) result = target_sat.cli.Auth.with_user().status() - assert LOGEDIN_MSG.format(settings.server.admin_username) in result[0]['message'] + assert LOGEDIN_MSG.format(settings.server.admin_username) in result.split("\n")[1] assert target_sat.cli.Org.with_user().list() diff --git a/tests/foreman/destructive/test_capsule.py b/tests/foreman/destructive/test_capsule.py index 1cc63f4fbbe..9d3ce22b420 100644 --- a/tests/foreman/destructive/test_capsule.py +++ b/tests/foreman/destructive/test_capsule.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/destructive/test_capsule_loadbalancer.py b/tests/foreman/destructive/test_capsule_loadbalancer.py index 9f94f13402c..6164eeb4eeb 100644 --- a/tests/foreman/destructive/test_capsule_loadbalancer.py +++ b/tests/foreman/destructive/test_capsule_loadbalancer.py @@ -11,9 +11,11 @@ :CaseImportance: High """ + import pytest from wrapanapi import VmState +from robottelo import constants from robottelo.config import settings from robottelo.constants import CLIENT_PORT, DataFile from robottelo.utils.installer import InstallerCommand @@ -22,34 +24,50 @@ 
@pytest.fixture(scope='module') -def content_for_client(module_target_sat, module_org, module_lce, module_cv, module_ak): +def content_for_client(module_target_sat, module_sca_manifest_org, module_lce, module_cv): """Setup content to be used by haproxy and client :return: Activation key, client lifecycle environment(used by setup_capsules()) """ - module_target_sat.cli_factory.setup_org_for_a_custom_repo( - { - 'url': settings.repos.RHEL7_OS, - 'organization-id': module_org.id, - 'content-view-id': module_cv.id, - 'lifecycle-environment-id': module_lce.id, - 'activationkey-id': module_ak.id, - } - ) - return {'client_ak': module_ak, 'client_lce': module_lce} + rhel_ver = settings.content_host.default_rhel_version + baseos = f'rhel{rhel_ver}_bos' + appstream = f'rhel{rhel_ver}_aps' + + rh_repos = [] + for repo in [baseos, appstream]: + synced_repo_id = module_target_sat.api_factory.enable_sync_redhat_repo( + constants.REPOS[repo], module_sca_manifest_org.id + ) + repo = module_target_sat.api.Repository(id=synced_repo_id).read() + rh_repos.append(repo) + + module_cv.repository = rh_repos + module_cv.update(['repository']) + module_cv.publish() + module_cv = module_cv.read() + cvv = module_cv.version[0] + cvv.promote(data={'environment_ids': module_lce.id}) + module_cv = module_cv.read() + ak = module_target_sat.api.ActivationKey( + content_view=module_cv, + environment=module_lce, + organization=module_sca_manifest_org, + ).create() + + return {'client_ak': ak, 'client_lce': module_lce} @pytest.fixture(scope='module') def setup_capsules( module_org, - rhel7_contenthost_module, + module_rhel_contenthost, module_lb_capsule, module_target_sat, content_for_client, ): """Install capsules with loadbalancer options""" - extra_cert_var = {'foreman-proxy-cname': rhel7_contenthost_module.hostname} - extra_installer_var = {'certs-cname': rhel7_contenthost_module.hostname} + extra_cert_var = {'foreman-proxy-cname': module_rhel_contenthost.hostname} + extra_installer_var = {'certs-cname': module_rhel_contenthost.hostname} for capsule in module_lb_capsule: capsule.register_to_cdn() @@ -92,20 +110,20 @@ def setup_capsules( @pytest.fixture(scope='module') def setup_haproxy( module_org, - rhel7_contenthost_module, + module_rhel_contenthost, content_for_client, module_target_sat, setup_capsules, ): """Install and configure haproxy and setup logging""" - haproxy = rhel7_contenthost_module + haproxy = module_rhel_contenthost # Using same AK for haproxy just for packages haproxy_ak = content_for_client['client_ak'] haproxy.execute('firewall-cmd --add-service RH-Satellite-6-capsule') haproxy.execute('firewall-cmd --runtime-to-permanent') haproxy.install_katello_ca(module_target_sat) haproxy.register_contenthost(module_org.label, haproxy_ak.name) - result = haproxy.execute('yum install haproxy policycoreutils-python -y') + result = haproxy.execute('yum install haproxy policycoreutils-python-utils -y') assert result.status == 0 haproxy.execute('rm -f /etc/haproxy/haproxy.cfg') haproxy.session.sftp_write( @@ -171,8 +189,9 @@ def loadbalancer_setup( @pytest.mark.e2e @pytest.mark.tier1 +@pytest.mark.rhel_ver_list([settings.content_host.default_rhel_version]) def test_loadbalancer_install_package( - loadbalancer_setup, setup_capsules, rhel7_contenthost, module_org, module_location, request + loadbalancer_setup, setup_capsules, rhel_contenthost, module_org, module_location, request ): r"""Install packages on a content host regardless of the registered capsule being available @@ -193,7 +212,7 @@ def 
test_loadbalancer_install_package( """ # Register content host - result = rhel7_contenthost.register( + result = rhel_contenthost.register( org=module_org, loc=module_location, activation_keys=loadbalancer_setup['content_for_client']['client_ak'].name, @@ -203,15 +222,15 @@ def test_loadbalancer_install_package( assert result.status == 0, f'Failed to register host: {result.stderr}' # Try package installation - result = rhel7_contenthost.execute('yum install -y tree') + result = rhel_contenthost.execute('yum install -y tree') assert result.status == 0 hosts = loadbalancer_setup['module_target_sat'].cli.Host.list( {'organization-id': loadbalancer_setup['module_org'].id} ) - assert rhel7_contenthost.hostname in [host['name'] for host in hosts] + assert rhel_contenthost.hostname in [host['name'] for host in hosts] - result = rhel7_contenthost.execute('rpm -qa | grep katello-ca-consumer') + result = rhel_contenthost.execute('rpm -qa | grep katello-ca-consumer') # Find which capsule the host is registered to since it's RoundRobin # The following also asserts the above result @@ -225,14 +244,14 @@ def test_loadbalancer_install_package( ) # Remove the packages from the client - result = rhel7_contenthost.execute('yum remove -y tree') + result = rhel_contenthost.execute('yum remove -y tree') assert result.status == 0 # Power off the capsule that the client is registered to registered_to_capsule.power_control(state=VmState.STOPPED, ensure=True) # Try package installation again - result = rhel7_contenthost.execute('yum install -y tree') + result = rhel_contenthost.execute('yum install -y tree') assert result.status == 0 diff --git a/tests/foreman/destructive/test_capsulecontent.py b/tests/foreman/destructive/test_capsulecontent.py index 12f3455c08d..d165b6ea841 100644 --- a/tests/foreman/destructive/test_capsulecontent.py +++ b/tests/foreman/destructive/test_capsulecontent.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from box import Box from fauxfactory import gen_alpha import pytest diff --git a/tests/foreman/destructive/test_clone.py b/tests/foreman/destructive/test_clone.py index f5f75dcd1a1..a7b786945e2 100644 --- a/tests/foreman/destructive/test_clone.py +++ b/tests/foreman/destructive/test_clone.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo import constants diff --git a/tests/foreman/destructive/test_contenthost.py b/tests/foreman/destructive/test_contenthost.py index d300ef9ab0e..99968345cea 100644 --- a/tests/foreman/destructive/test_contenthost.py +++ b/tests/foreman/destructive/test_contenthost.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/destructive/test_contentview.py b/tests/foreman/destructive/test_contentview.py index 03af433a939..c27275b653a 100644 --- a/tests/foreman/destructive/test_contentview.py +++ b/tests/foreman/destructive/test_contentview.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from nailgun.entity_mixins import TaskFailedError import pytest diff --git a/tests/foreman/destructive/test_discoveredhost.py b/tests/foreman/destructive/test_discoveredhost.py index 8034957ec72..86568230e0d 100644 --- a/tests/foreman/destructive/test_discoveredhost.py +++ b/tests/foreman/destructive/test_discoveredhost.py @@ -9,6 +9,7 @@ :CaseAutomation: Automated """ + from copy import copy import re diff --git a/tests/foreman/destructive/test_foreman_rake.py b/tests/foreman/destructive/test_foreman_rake.py index 5162fa94be2..1cbc67a7b20 100644 --- 
a/tests/foreman/destructive/test_foreman_rake.py +++ b/tests/foreman/destructive/test_foreman_rake.py @@ -11,6 +11,7 @@ :Team: Endeavour """ + import pytest pytestmark = pytest.mark.destructive diff --git a/tests/foreman/destructive/test_foreman_service.py b/tests/foreman/destructive/test_foreman_service.py index 6d0d24de110..fc033cd2965 100644 --- a/tests/foreman/destructive/test_foreman_service.py +++ b/tests/foreman/destructive/test_foreman_service.py @@ -7,6 +7,7 @@ :CaseImportance: Medium """ + import pytest from robottelo.constants import DEFAULT_ORG diff --git a/tests/foreman/destructive/test_host.py b/tests/foreman/destructive/test_host.py index bab00e5acab..4a8ae350f39 100644 --- a/tests/foreman/destructive/test_host.py +++ b/tests/foreman/destructive/test_host.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from airgun.exceptions import NoSuchElementException import pytest diff --git a/tests/foreman/destructive/test_infoblox.py b/tests/foreman/destructive/test_infoblox.py index 52e26830b14..3038cbee083 100644 --- a/tests/foreman/destructive/test_infoblox.py +++ b/tests/foreman/destructive/test_infoblox.py @@ -9,6 +9,7 @@ :CaseImportance: High """ + from fauxfactory import gen_mac, gen_string import pytest import requests diff --git a/tests/foreman/destructive/test_installer.py b/tests/foreman/destructive/test_installer.py index 36776213c13..8c61633fc53 100644 --- a/tests/foreman/destructive/test_installer.py +++ b/tests/foreman/destructive/test_installer.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import random from fauxfactory import gen_domain, gen_string diff --git a/tests/foreman/destructive/test_katello_agent.py b/tests/foreman/destructive/test_katello_agent.py index 86943017ee7..a18dc4321ca 100644 --- a/tests/foreman/destructive/test_katello_agent.py +++ b/tests/foreman/destructive/test_katello_agent.py @@ -16,10 +16,12 @@ :Upstream: No """ + import pytest from robottelo import constants from robottelo.config import settings +from robottelo.utils import ohsnap pytestmark = [ pytest.mark.run_in_one_thread, @@ -167,14 +169,16 @@ def test_positive_upgrade_warning(sat_with_katello_agent): 'which will automatically uninstall katello-agent.' 
) - upstream_rpms = sat.get_repo_files_by_url(constants.FOREMAN_NIGHTLY_URL) - fm_rpm = [rpm for rpm in upstream_rpms if 'foreman_maintain' in rpm] - assert fm_rpm, 'No upstream foreman-maintain package found' - - for rpm in fm_rpm: - res = sat.execute(f'yum -y install {constants.FOREMAN_NIGHTLY_URL}{rpm}') - assert res.status == 0, f'{rpm} installation failed' + maintain_repo = ohsnap.dogfood_repository( + settings.ohsnap, + product='satellite', + repo='maintenance', + release=target_ver, + os_release=settings.server.version.rhel_version, + ) + sat.create_custom_repos(next_maintain=maintain_repo.baseurl) + sat.cli.Upgrade.list_versions() # f-m self-update res = sat.cli.Upgrade.list_versions() assert res.status == 0, 'Upgrade list-versions command failed' assert target_ver in res.stdout, 'Target version or Scenario not found' diff --git a/tests/foreman/destructive/test_katello_certs_check.py b/tests/foreman/destructive/test_katello_certs_check.py index db133e99749..c05a64d85b4 100644 --- a/tests/foreman/destructive/test_katello_certs_check.py +++ b/tests/foreman/destructive/test_katello_certs_check.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import re import pytest diff --git a/tests/foreman/destructive/test_ldap_authentication.py b/tests/foreman/destructive/test_ldap_authentication.py index 33539fff89e..c5a82bff6fa 100644 --- a/tests/foreman/destructive/test_ldap_authentication.py +++ b/tests/foreman/destructive/test_ldap_authentication.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import os from time import sleep @@ -196,9 +197,12 @@ def test_positive_create_with_https( assert ldap_source['ldap_server']['name'] == ldap_auth_name assert ldap_source['ldap_server']['host'] == auth_data['ldap_hostname'] assert ldap_source['ldap_server']['port'] == '636' - with module_target_sat.ui_session( - test_name, username, auth_data['ldap_user_passwd'] - ) as ldapsession, pytest.raises(NavigationTriesExceeded): + with ( + module_target_sat.ui_session( + test_name, username, auth_data['ldap_user_passwd'] + ) as ldapsession, + pytest.raises(NavigationTriesExceeded), + ): ldapsession.user.search('') assert module_target_sat.api.User().search(query={'search': f'login="{username}"'}) diff --git a/tests/foreman/destructive/test_ldapauthsource.py b/tests/foreman/destructive/test_ldapauthsource.py index 0cebd782532..1b6aeec2701 100644 --- a/tests/foreman/destructive/test_ldapauthsource.py +++ b/tests/foreman/destructive/test_ldapauthsource.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from time import sleep import pytest diff --git a/tests/foreman/destructive/test_leapp_satellite.py b/tests/foreman/destructive/test_leapp_satellite.py index 9022b4fec86..8d673b94645 100644 --- a/tests/foreman/destructive/test_leapp_satellite.py +++ b/tests/foreman/destructive/test_leapp_satellite.py @@ -9,6 +9,7 @@ :CaseImportance: High """ + from broker import Broker import pytest diff --git a/tests/foreman/destructive/test_packages.py b/tests/foreman/destructive/test_packages.py index f052948002f..b27065b9a96 100644 --- a/tests/foreman/destructive/test_packages.py +++ b/tests/foreman/destructive/test_packages.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import re import pytest diff --git a/tests/foreman/destructive/test_ping.py b/tests/foreman/destructive/test_ping.py index 96ae171c8f7..145ce663be6 100644 --- a/tests/foreman/destructive/test_ping.py +++ b/tests/foreman/destructive/test_ping.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest pytestmark = pytest.mark.destructive diff --git 
a/tests/foreman/destructive/test_puppetplugin.py b/tests/foreman/destructive/test_puppetplugin.py index 9bd118ac4e7..3411f13000d 100644 --- a/tests/foreman/destructive/test_puppetplugin.py +++ b/tests/foreman/destructive/test_puppetplugin.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.constants import PUPPET_CAPSULE_INSTALLER, PUPPET_COMMON_INSTALLER_OPTS diff --git a/tests/foreman/destructive/test_realm.py b/tests/foreman/destructive/test_realm.py index cbddcdc3bc8..7ee6f519037 100644 --- a/tests/foreman/destructive/test_realm.py +++ b/tests/foreman/destructive/test_realm.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_string diff --git a/tests/foreman/destructive/test_registration.py b/tests/foreman/destructive/test_registration.py index 9c8dbfa4929..7b4aae79ebf 100644 --- a/tests/foreman/destructive/test_registration.py +++ b/tests/foreman/destructive/test_registration.py @@ -10,6 +10,7 @@ :Team: Rocket """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/destructive/test_remoteexecution.py b/tests/foreman/destructive/test_remoteexecution.py index c9c6bdb8b22..190708153a5 100644 --- a/tests/foreman/destructive/test_remoteexecution.py +++ b/tests/foreman/destructive/test_remoteexecution.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import client from nailgun.entity_mixins import TaskFailedError diff --git a/tests/foreman/destructive/test_rename.py b/tests/foreman/destructive/test_rename.py index 03fd9ff0d6f..8f06cd7a5f8 100644 --- a/tests/foreman/destructive/test_rename.py +++ b/tests/foreman/destructive/test_rename.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/destructive/test_repository.py b/tests/foreman/destructive/test_repository.py index 026134f2017..41c380df2ad 100644 --- a/tests/foreman/destructive/test_repository.py +++ b/tests/foreman/destructive/test_repository.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from nailgun.entity_mixins import TaskFailedError import pytest diff --git a/tests/foreman/endtoend/test_api_endtoend.py b/tests/foreman/endtoend/test_api_endtoend.py index dd1cb4fbd56..277feff85ce 100644 --- a/tests/foreman/endtoend/test_api_endtoend.py +++ b/tests/foreman/endtoend/test_api_endtoend.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from collections import defaultdict import http from pprint import pformat diff --git a/tests/foreman/endtoend/test_cli_endtoend.py b/tests/foreman/endtoend/test_cli_endtoend.py index 30de18bd960..635b3da4f64 100644 --- a/tests/foreman/endtoend/test_cli_endtoend.py +++ b/tests/foreman/endtoend/test_cli_endtoend.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_alphanumeric, gen_ipaddr import pytest @@ -266,14 +267,22 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel ) content_host = target_sat.cli.Host.with_user(user['login'], user['password']).info( - {'id': content_host['id']} + {'id': content_host['id']}, output_format='json' ) + # check that content view matches what we passed - assert content_host['content-information']['content-view']['name'] == content_view['name'] + assert ( + content_host['content-information']['content-view-environments']['1']['content-view'][ + 'name' + ] + == content_view['name'] + ) # check that lifecycle environment matches assert ( - content_host['content-information']['lifecycle-environment']['name'] + 
content_host['content-information']['content-view-environments']['1'][ + 'lifecycle-environment' + ]['name'] == lifecycle_environment['name'] ) diff --git a/tests/foreman/installer/test_infoblox.py b/tests/foreman/installer/test_infoblox.py index 5f839d4e850..b17b3db02ab 100644 --- a/tests/foreman/installer/test_infoblox.py +++ b/tests/foreman/installer/test_infoblox.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest diff --git a/tests/foreman/installer/test_installer.py b/tests/foreman/installer/test_installer.py index 451c3898e9d..fb121d0b2dc 100644 --- a/tests/foreman/installer/test_installer.py +++ b/tests/foreman/installer/test_installer.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest import requests diff --git a/tests/foreman/longrun/test_inc_updates.py b/tests/foreman/longrun/test_inc_updates.py index 8ffb84fb36c..f06771f676d 100644 --- a/tests/foreman/longrun/test_inc_updates.py +++ b/tests/foreman/longrun/test_inc_updates.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import datetime, timedelta from nailgun import entities @@ -23,9 +24,11 @@ ENVIRONMENT, FAKE_4_CUSTOM_PACKAGE, PRDS, + REAL_RHEL8_1_ERRATA_ID, REPOS, REPOSET, ) +from robottelo.logging import logger pytestmark = [pytest.mark.run_in_one_thread] @@ -226,3 +229,110 @@ def test_positive_noapply_api( outval['action'] == 'Incremental Update of 1 Content View Version(s) with 1 Package(s), and 1 Errata' ) + + +@pytest.mark.tier3 +def test_positive_incremental_update_time(module_target_sat, module_entitlement_manifest_org): + """Incremental update should not take a long time. + + :id: a9cdcc58-2d10-42cf-8e24-f7bec3b79d6b + + :steps: + 1. Setup larger rh repositories; rhel8 baseOS, rhst8, rhsc8. + 2. Create content view and add repos, sync and wait. + 3. Publish a content view version with all content. + 4. Using hammer, perform incremental update with errata, on that new version. + - Log the duration of the incremental update + 5. Publish the full content-view, with added incremental version. + - Log the duration of the content-view publish + + :expectedresults: + 1. Incremental update takes a short amount of time. + 2. Incremental update takes less time than full content-view publish, + or the time taken for both was close (within 20%). 
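The 20% tolerance in expected result 2 is easiest to see as a tiny standalone comparison; the helper below is purely illustrative (invented name, made-up durations), not something this patch adds:

def within_tolerance(update_s, publish_s, tolerance=0.2):
    # True when the incremental update finished faster than the full publish,
    # or was slower by no more than `tolerance` (20%) of the publish time.
    if update_s < publish_s:
        return True
    return (update_s - publish_s) / publish_s <= tolerance

assert within_tolerance(11.0, 10.0)      # 10% slower -> acceptable
assert not within_tolerance(13.0, 10.0)  # 30% slower -> flagged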
+ + :BZ: 2117760, 1829266 + + :customerscenario: true + + """ + # create content view + cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_entitlement_manifest_org.id} + ) + repo_sync_timestamp = ( + datetime.utcnow().replace(microsecond=0) - timedelta(seconds=1) + ).strftime('%Y-%m-%d %H:%M') + # set up RH repositories, add them to the CV, begin sync + for _repo in ['rhel8_bos', 'rhst8', 'rhsclient8']: + rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_entitlement_manifest_org.id, + product=PRDS['rhel8'], + repo=REPOS[_repo]['name'], + reposet=REPOSET[_repo], + releasever=REPOS[_repo]['releasever'], + ) + module_target_sat.cli.ContentView.add_repository( + { + 'id': cv['id'], + 'organization-id': module_entitlement_manifest_org.id, + 'repository-id': rh_repo_id, + } + ) + module_target_sat.api.Repository(id=rh_repo_id).sync(synchronous=False) + + # wait for all repo sync tasks + sync_tasks = module_target_sat.wait_for_tasks( + search_query=( + 'label = Actions::Katello::Repository::Sync' + f' and started_at >= "{repo_sync_timestamp}"' + ), + search_rate=10, + max_tries=200, + ) + assert all(task.poll()['result'] == 'success' for task in sync_tasks) + # publish and fetch new CVV + module_target_sat.cli.ContentView.publish({'id': cv['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': cv['id']}) + cvv = content_view['versions'][0] + + # update incremental version via hammer, using one errata. + # expect: incr. "version-1.1" is created + update_start_time = datetime.utcnow() + result = module_target_sat.cli.ContentView.version_incremental_update( + {'content-view-version-id': cvv['id'], 'errata-ids': REAL_RHEL8_1_ERRATA_ID} + ) + assert 'version-1.1' in str(result[0].keys()) + update_duration = (datetime.utcnow() - update_start_time).total_seconds() + logger.info( + f'Update of incremental version-1.1, for CV id: {content_view["id"]},' + f' took {update_duration} seconds.' + ) + # publish the full CV, containing the added version-1.1 + publish_start_time = datetime.utcnow() + result = module_target_sat.cli.ContentView.publish({'id': cv['id']}) + publish_duration = (datetime.utcnow() - publish_start_time).total_seconds() + logger.info(f'Publish for CV id: {content_view["id"]}, took {publish_duration} seconds.') + # Per the BZs: expect the update duration to be quicker than the publish duration; + # if instead the update took longer, check that the two were close, + # i.e. that the update did not take significantly more time. + if update_duration >= publish_duration: + # unexpected: perhaps both tasks were very quick and took only a handful of seconds; + # assert the difference was not significant (within 20%). + assert (update_duration - publish_duration) / publish_duration <= 0.2, ( + f'Incremental update took longer than publish of entire content-view id: {content_view["id"]}:' + f' Update took significantly more time, 20% or longer, than publish.' + f' update duration: {update_duration} s.\n publish duration: {publish_duration} s.' + ) + # else: base expected condition: the update was quicker than the publish. + + # some arbitrary timeouts, given the amount of content in the CV from these repos.
+ assert update_duration <= 20, ( + 'Possible performance degradation in incremental update time.', + f' Took {update_duration} seconds, but expected to not exceed 20 seconds.', + ) + assert publish_duration <= 30, ( + 'Possible performance degradation in content-view publish time, after performing incremental update.', + f' Took {publish_duration} seconds, but expected to not exceed 30 seconds.', + ) diff --git a/tests/foreman/longrun/test_oscap.py b/tests/foreman/longrun/test_oscap.py index ecf4da2857c..86711368439 100644 --- a/tests/foreman/longrun/test_oscap.py +++ b/tests/foreman/longrun/test_oscap.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from broker import Broker from fauxfactory import gen_string from nailgun import entities diff --git a/tests/foreman/maintain/test_advanced.py b/tests/foreman/maintain/test_advanced.py index 68b00e5c071..219a22d03da 100644 --- a/tests/foreman/maintain/test_advanced.py +++ b/tests/foreman/maintain/test_advanced.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest import yaml diff --git a/tests/foreman/maintain/test_backup_restore.py b/tests/foreman/maintain/test_backup_restore.py index f81b268e844..0e110b78f1b 100644 --- a/tests/foreman/maintain/test_backup_restore.py +++ b/tests/foreman/maintain/test_backup_restore.py @@ -12,6 +12,7 @@ """ + import re from fauxfactory import gen_string @@ -136,7 +137,7 @@ def test_positive_backup_split_pulp_tar( assert 'FAIL' not in result.stdout # Check for expected files - backup_dir = re.findall(fr'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] + backup_dir = re.findall(rf'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] files = sat_maintain.execute(f'ls -a {backup_dir}').stdout.split('\n') files = [i for i in files if not re.compile(r'^\.*$').search(i)] @@ -180,7 +181,7 @@ def test_positive_backup_capsule_features( assert 'FAIL' not in result.stdout # Check for expected files - backup_dir = re.findall(fr'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] + backup_dir = re.findall(rf'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] files = sat_maintain.execute(f'ls -a {backup_dir}').stdout.split('\n') files = [i for i in files if not re.compile(r'^\.*$').search(i)] @@ -215,7 +216,7 @@ def test_positive_backup_all(sat_maintain, setup_backup_tests, module_synced_rep assert result.status == 0 assert 'FAIL' not in result.stdout - init_backup_dir = re.findall(fr'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] + init_backup_dir = re.findall(rf'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] result = sat_maintain.cli.Backup.run_backup( backup_dir=subdir, @@ -262,7 +263,7 @@ def test_positive_backup_offline_logical(sat_maintain, setup_backup_tests, modul assert 'FAIL' not in result.stdout # Check for expected files - backup_dir = re.findall(fr'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] + backup_dir = re.findall(rf'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] files = sat_maintain.execute(f'ls -a {backup_dir}').stdout.split('\n') files = [i for i in files if not re.compile(r'^\.*$').search(i)] @@ -435,7 +436,7 @@ def test_positive_puppet_backup_restore( assert 'FAIL' not in result.stdout # Check for expected files - backup_dir = re.findall(fr'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] + backup_dir = re.findall(rf'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] files = sat_maintain.execute(f'ls -a {backup_dir}').stdout.split('\n') files = [i for i in files 
if not re.compile(r'^\.*$').search(i)] @@ -521,7 +522,7 @@ def test_positive_backup_restore( assert 'FAIL' not in result.stdout # Check for expected files - backup_dir = re.findall(fr'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] + backup_dir = re.findall(rf'{subdir}\/{instance}-backup-.*-[0-5][0-9]', result.stdout)[0] files = sat_maintain.execute(f'ls -a {backup_dir}').stdout.split('\n') files = [i for i in files if not re.compile(r'^\.*$').search(i)] @@ -605,7 +606,7 @@ def test_positive_backup_restore_incremental( assert result.status == 0 assert 'FAIL' not in result.stdout - init_backup_dir = re.findall(fr'{subdir}\/satellite-backup-.*-[0-5][0-9]', result.stdout)[0] + init_backup_dir = re.findall(rf'{subdir}\/satellite-backup-.*-[0-5][0-9]', result.stdout)[0] # create additional content secondary_repo = sat_maintain.api.Repository( @@ -624,7 +625,7 @@ def test_positive_backup_restore_incremental( assert 'FAIL' not in result.stdout # check for expected files - inc_backup_dir = re.findall(fr'{subdir}\/satellite-backup-.*-[0-5][0-9]', result.stdout)[0] + inc_backup_dir = re.findall(rf'{subdir}\/satellite-backup-.*-[0-5][0-9]', result.stdout)[0] files = sat_maintain.execute(f'ls -a {inc_backup_dir}').stdout.split('\n') files = [i for i in files if not re.compile(r'^\.*$').search(i)] diff --git a/tests/foreman/maintain/test_health.py b/tests/foreman/maintain/test_health.py index e8bc468d410..8c251314b1a 100644 --- a/tests/foreman/maintain/test_health.py +++ b/tests/foreman/maintain/test_health.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import time from fauxfactory import gen_string diff --git a/tests/foreman/maintain/test_maintenance_mode.py b/tests/foreman/maintain/test_maintenance_mode.py index b8d5024e1b1..a405371d516 100644 --- a/tests/foreman/maintain/test_maintenance_mode.py +++ b/tests/foreman/maintain/test_maintenance_mode.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest import yaml diff --git a/tests/foreman/maintain/test_offload_DB.py b/tests/foreman/maintain/test_offload_DB.py index 0e1b3ced9d9..3c069658f98 100644 --- a/tests/foreman/maintain/test_offload_DB.py +++ b/tests/foreman/maintain/test_offload_DB.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/maintain/test_packages.py b/tests/foreman/maintain/test_packages.py index f0ae22d6d10..90483a6971f 100644 --- a/tests/foreman/maintain/test_packages.py +++ b/tests/foreman/maintain/test_packages.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/maintain/test_service.py b/tests/foreman/maintain/test_service.py index 9b2970a8a64..e25fee526f9 100644 --- a/tests/foreman/maintain/test_service.py +++ b/tests/foreman/maintain/test_service.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + from fauxfactory import gen_string import pytest from wait_for import wait_for @@ -226,7 +227,9 @@ def test_positive_foreman_service(sat_maintain): assert 'foreman' in result.stdout result = sat_maintain.cli.Service.status(options={'only': 'httpd'}) assert result.status == 0 - result = sat_maintain.cli.Health.check(options={'assumeyes': True}) + result = sat_maintain.cli.Health.check( + options={'assumeyes': True, 'whitelist': 'check-tftp-storage'} + ) assert result.status == 0 assert 'foreman' in result.stdout assert sat_maintain.cli.Service.start(options={'only': 'foreman'}).status == 0 diff --git a/tests/foreman/maintain/test_upgrade.py 
b/tests/foreman/maintain/test_upgrade.py index 686f036cd80..24de0381598 100644 --- a/tests/foreman/maintain/test_upgrade.py +++ b/tests/foreman/maintain/test_upgrade.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest from robottelo.config import settings @@ -98,8 +99,14 @@ def test_positive_repositories_validate(sat_maintain): @pytest.mark.parametrize( 'custom_host', [ - {'deploy_rhel_version': '8', 'deploy_flavor': 'satqe-ssd.disk.xxxl'}, - {'deploy_rhel_version': '8', 'deploy_flavor': 'satqe-ssd.standard.std'}, + { + 'deploy_rhel_version': settings.server.version.rhel_version, + 'deploy_flavor': 'satqe-ssd.disk.xxxl', + }, + { + 'deploy_rhel_version': settings.server.version.rhel_version, + 'deploy_flavor': 'satqe-ssd.standard.std', + }, ], ids=['default', 'medium'], indirect=True, @@ -122,15 +129,18 @@ def test_negative_pre_upgrade_tuning_profile_check(request, custom_host): :expectedresults: Pre-upgrade check fails. """ profile = request.node.callspec.id + rhel_major = custom_host.os_version.major sat_version = ".".join(settings.server.version.release.split('.')[0:2]) - # Register to CDN for RHEL8 repos, download and enable last y stream's ohsnap repos, + # Register to CDN for RHEL repos, download and enable last y stream's ohsnap repos, # and enable the satellite module and install it on the host custom_host.register_to_cdn() last_y_stream = last_y_stream_version( SATELLITE_VERSION if sat_version == 'stream' else sat_version ) custom_host.download_repofile(product='satellite', release=last_y_stream) - custom_host.execute('dnf -y module enable satellite:el8 && dnf -y install satellite') + custom_host.execute( + f'dnf -y module enable satellite:el{rhel_major} && dnf -y install satellite' + ) # Install with development tuning profile to get around installer checks custom_host.execute( 'satellite-installer --scenario satellite --tuning development', diff --git a/tests/foreman/sanity/test_bvt.py b/tests/foreman/sanity/test_bvt.py index 7da1ccc3d8e..9c37152fa34 100644 --- a/tests/foreman/sanity/test_bvt.py +++ b/tests/foreman/sanity/test_bvt.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import re import pytest diff --git a/tests/foreman/sys/test_dynflow.py b/tests/foreman/sys/test_dynflow.py index 2b758bcb208..c93f3475d29 100644 --- a/tests/foreman/sys/test_dynflow.py +++ b/tests/foreman/sys/test_dynflow.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest diff --git a/tests/foreman/sys/test_fam.py b/tests/foreman/sys/test_fam.py index f4500a2b599..a797c39d792 100644 --- a/tests/foreman/sys/test_fam.py +++ b/tests/foreman/sys/test_fam.py @@ -11,6 +11,7 @@ :Team: Platform """ + from broker import Broker import pytest @@ -45,6 +46,19 @@ def setup_fam(module_target_sat, module_sca_manifest): # Execute AAP WF for FAM setup Broker().execute(workflow='fam-test-setup', source_vm=module_target_sat.name) + # Setup provisioning resources and copy config files to the Satellite + module_target_sat.configure_libvirt_cr() + module_target_sat.put( + settings.fam.server.to_yaml(), + f'{FAM_ROOT_DIR}/tests/test_playbooks/vars/server.yml', + temp_file=True, + ) + module_target_sat.put( + settings.fam.compute_profile.to_yaml(), + f'{FAM_ROOT_DIR}/tests/test_playbooks/vars/compute_profile.yml', + temp_file=True, + ) + # Edit Makefile to not try to rebuild the collection when tests run module_target_sat.execute(f"sed -i '/^live/ s/$(MANIFEST)//' {FAM_ROOT_DIR}/Makefile") @@ -53,27 +67,10 @@ def setup_fam(module_target_sat, module_sca_manifest): module_target_sat.execute( f'mv 
{module_sca_manifest.name} {FAM_ROOT_DIR}/tests/test_playbooks/data' ) - - # Edit config file config_file = f'{FAM_ROOT_DIR}/tests/test_playbooks/vars/server.yml' - module_target_sat.execute( - f'cp {FAM_ROOT_DIR}/tests/test_playbooks/vars/server.yml.example {config_file}' - ) - module_target_sat.execute( - f'sed -i "s/foreman.example.com/{module_target_sat.hostname}/g" {config_file}' - ) - module_target_sat.execute( - f'sed -i "s/rhsm_pool_id:.*/rhsm_pool_id: {settings.subscription.rhn_poolid}/g" {config_file}' - ) - module_target_sat.execute( - f'''sed -i 's/rhsm_username:.*/rhsm_username: "{settings.subscription.rhn_username}"/g' {config_file}''' - ) module_target_sat.execute( f'''sed -i 's|subscription_manifest_path:.*|subscription_manifest_path: "data/{module_sca_manifest.name}"|g' {config_file}''' ) - module_target_sat.execute( - f'''sed -i 's/rhsm_password:.*/rhsm_password: "{settings.subscription.rhn_password}"/g' {config_file}''' - ) @pytest.mark.pit_server diff --git a/tests/foreman/sys/test_katello_certs_check.py b/tests/foreman/sys/test_katello_certs_check.py index 640add5a0ae..bf93bc041f2 100644 --- a/tests/foreman/sys/test_katello_certs_check.py +++ b/tests/foreman/sys/test_katello_certs_check.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import re import pytest @@ -160,28 +161,6 @@ def test_positive_validate_katello_certs_check_output(self, cert_setup_teardown) result = target_sat.execute(command) self.validate_output(result, cert_data) - @pytest.mark.tier1 - def test_katello_certs_check_output_wildcard_inputs(self, cert_setup_teardown): - """Validate that katello-certs-check generates correct output with wildcard certs. - - :id: 7f9da806-5b23-11eb-b7ea-d46d6dd3b5b2 - - :steps: - - 1. Get valid wildcard certs from generate_certs - 2. Run katello-certs-check with the required valid arguments - katello-certs-check -c CERT_FILE -k KEY_FILE -r REQ_FILE - -b CA_BUNDLE_FILE - 3. Assert the output has correct commands with options - - :expectedresults: katello-certs-check should generate correct commands - with options. - """ - cert_data, target_sat = cert_setup_teardown - command = 'katello-certs-check -c certs/wildcard.crt -k certs/wildcard.key -b certs/ca.crt' - result = target_sat.execute(command) - self.validate_output(result, cert_data) - @pytest.mark.parametrize(('error', 'cert_file', 'key_file', 'ca_file'), invalid_inputs) @pytest.mark.tier1 def test_katello_certs_check_output_invalid_input( @@ -237,7 +216,7 @@ def test_negative_check_expiration_of_certificate(self, cert_setup_teardown): :expectedresults: Checking expiration of certificate check should fail. - :CaseAutomation: NotAutomated + :CaseAutomation: Automated """ cert_data, target_sat = cert_setup_teardown hostname = target_sat.hostname @@ -278,20 +257,3 @@ def test_negative_validate_certificate_subject(self): :CaseAutomation: NotAutomated """ - - @pytest.mark.stubbed - @pytest.mark.tier1 - def test_negative_check_private_key_match(self): - """Validate private key match with certificate. - - :id: 358edbb3-08b0-47d7-856b-ce0d5ea95979 - - :steps: - - 1. Have KEY_FILE with invalid private key - 2. Run katello-certs-check with the required arguments - - :expectedresults: Private key match with the certificate should fail. 
- - :CaseAutomation: NotAutomated - """ diff --git a/tests/foreman/sys/test_pulp3_filesystem.py b/tests/foreman/sys/test_pulp3_filesystem.py index ac747ca5f10..a0ab51630af 100644 --- a/tests/foreman/sys/test_pulp3_filesystem.py +++ b/tests/foreman/sys/test_pulp3_filesystem.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import datetime import json diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index 8874519a63b..8f643a38ad9 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo import constants diff --git a/tests/foreman/ui/test_activationkey.py b/tests/foreman/ui/test_activationkey.py index 5dc53f39c1e..af75c1cf32f 100644 --- a/tests/foreman/ui/test_activationkey.py +++ b/tests/foreman/ui/test_activationkey.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from broker import Broker diff --git a/tests/foreman/ui/test_ansible.py b/tests/foreman/ui/test_ansible.py index 89eba62ff56..4a9b4162563 100644 --- a/tests/foreman/ui/test_ansible.py +++ b/tests/foreman/ui/test_ansible.py @@ -1,338 +1,687 @@ -"""Test class for Ansible Roles and Variables pages +"""Test class for Ansible-ConfigurationManagement and Ansible-RemoteExecution components :Requirement: Ansible :CaseAutomation: Automated -:CaseComponent: Ansible-ConfigurationManagement - :Team: Rocket :CaseImportance: Critical - """ + from fauxfactory import gen_string import pytest +from wait_for import wait_for import yaml from robottelo import constants from robottelo.config import robottelo_tmp_dir, settings -def test_positive_create_and_delete_variable(target_sat): - """Create an Ansible variable with the minimum required values, then delete the variable. +class TestAnsibleCfgMgmt: + """Test class for Configuration Management with Ansible - :id: 7006d7c7-788a-4447-a564-d6b03ec06aaf + :CaseComponent: Ansible-ConfigurationManagement + """ - :steps: + @pytest.mark.tier2 + def test_positive_create_and_delete_variable(self, target_sat): + """Create an Ansible variable with the minimum required values, then delete the variable. + + :id: 7006d7c7-788a-4447-a564-d6b03ec06aaf + + :steps: + 1. Import Ansible roles if none have been imported yet. + 2. Create an Ansible variable with only a name and an assigned Ansible role. + 3. Verify that the Ansible variable has been created. + 4. Delete the Ansible variable. + 5. Verify that the Ansible Variable has been deleted. + + :expectedresults: The variable is successfully created and deleted. + """ + key = gen_string('alpha') + SELECTED_ROLE = 'redhat.satellite.activation_keys' + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + with target_sat.ui_session() as session: + session.ansiblevariables.create( + { + 'key': key, + 'ansible_role': SELECTED_ROLE, + } + ) + assert session.ansiblevariables.search(key)[0]['Name'] == key + session.ansiblevariables.delete(key) + assert not session.ansiblevariables.search(key) + + @pytest.mark.tier3 + def test_positive_create_variable_with_overrides(self, target_sat): + """Create an Ansible variable with all values populated. + + :id: 90acea37-4c2f-42e5-92a6-0c88148f4fb6 + + :steps: + 1. Import Ansible roles if none have been imported yet. + 2. Create an Anible variable, populating all fields on the creation form. + 3. Verify that the Ansible variable was created successfully. + 4. Delete the Ansible variable. + 5. 
Verify that the Ansible Variable has been deleted. + + :expectedresults: The variable is successfully created. + """ + key = gen_string('alpha') + SELECTED_ROLE = 'redhat.satellite.activation_keys' + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + with target_sat.ui_session() as session: + session.ansiblevariables.create_with_overrides( + { + 'key': key, + 'description': 'this is a description', + 'ansible_role': SELECTED_ROLE, + 'parameter_type': 'integer', + 'default_value': '11', + 'validator_type': 'list', + 'validator_rule': '11, 12, 13', + 'attribute_order': 'domain \n fqdn \n hostgroup \n os', + 'matcher_section.params': [ + { + 'attribute_type': {'matcher_key': 'os', 'matcher_value': 'fedora'}, + 'value': '13', + } + ], + } + ) + assert session.ansiblevariables.search(key)[0]['Name'] == key + session.ansiblevariables.delete(key) + assert not session.ansiblevariables.search(key) + + @pytest.mark.tier2 + def test_positive_host_role_information(self, target_sat, function_host): + """Assign Ansible Role to a Host and verify that the information + in the new UI is displayed correctly + + :id: 7da913ef-3b43-4bfa-9a45-d895431c8b56 + + :steps: + 1. Register a RHEL host to Satellite. + 2. Import all roles available by default. + 3. Assign one role to the RHEL host. + 4. Navigate to the new UI for the given Host. + 5. Select the 'Ansible' tab, then the 'Inventory' sub-tab. + + :expectedresults: Roles assigned directly to the Host are visible on the subtab. + """ + SELECTED_ROLE = 'RedHatInsights.insights-client' + + location = function_host.location.read() + organization = function_host.organization.read() + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + target_sat.cli.Host.ansible_roles_assign( + {'id': function_host.id, 'ansible-roles': SELECTED_ROLE} + ) + host_roles = function_host.list_ansible_roles() + assert host_roles[0]['name'] == SELECTED_ROLE + with target_sat.ui_session() as session: + session.organization.select(organization.name) + session.location.select(location.name) + ansible_roles_table = session.host_new.get_ansible_roles(function_host.name) + assert ansible_roles_table[0]['Name'] == SELECTED_ROLE + all_assigned_roles_table = session.host_new.get_ansible_roles_modal(function_host.name) + assert all_assigned_roles_table[0]['Name'] == SELECTED_ROLE + + @pytest.mark.rhel_ver_match('8') + def test_positive_assign_ansible_role_variable_on_host( + self, + request, + target_sat, + rhel_contenthost, + module_activation_key, + module_org, + module_location, + ): + """Verify ansible variable is added to the role and attached to the host. + + :id: 7ec4fe19-5a08-4b10-bb4e-7327dd68699a + + :BZ: 2170727 + + :customerscenario: true + + :steps: + 1. Create an Ansible variable with array type and set the default value. + 2. Enable both 'Merge Overrides' and 'Merge Default'. + 3. Add the variable to a role and attach the role to the host. + 4. Verify that ansible role and variable is added to the host. + + :expectedresults: The role and variable is successfully added to the host. + """ + + @request.addfinalizer + def _finalize(): + result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) + assert f'Ansible role [{SELECTED_ROLE}] was deleted.' 
in result[0]['message'] + + key = gen_string('alpha') + SELECTED_ROLE = 'redhat.satellite.activation_keys' + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + command = target_sat.api.RegistrationCommand( + organization=module_org, + location=module_location, + activation_keys=[module_activation_key.name], + ).create() + result = rhel_contenthost.execute(command) + assert result.status == 0, f'Failed to register host: {result.stderr}' + target_host = rhel_contenthost.nailgun_host + default_value = '[\"test\"]' # fmt: skip + parameter_type = 'array' + with target_sat.ui_session() as session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) + session.ansiblevariables.create_with_overrides( + { + 'key': key, + 'ansible_role': SELECTED_ROLE, + 'override': 'true', + 'parameter_type': parameter_type, + 'default_value': default_value, + 'validator_type': None, + 'attribute_order': 'domain \n fqdn \n hostgroup \n os', + 'merge_default': 'true', + 'merge_overrides': 'true', + 'matcher_section.params': [ + { + 'attribute_type': {'matcher_key': 'os', 'matcher_value': 'fedora'}, + 'value': '[\'13\']', + } + ], + } + ) + result = target_sat.cli.Host.ansible_roles_assign( + {'id': target_host.id, 'ansible-roles': SELECTED_ROLE} + ) + assert 'Ansible roles were assigned' in result[0]['message'] + values = session.host_new.get_details(rhel_contenthost.hostname, 'ansible')['ansible'][ + 'variables' + ]['table'] + assert (key, SELECTED_ROLE, default_value, parameter_type) in [ + (var['Name'], var['Ansible role'], var['Value'], var['Type']) for var in values + ] + + @pytest.mark.stubbed + @pytest.mark.tier2 + def test_positive_role_variable_information(self): + """Create and assign variables to an Ansible Role and verify that the information in + the new UI is displayed correctly + + :id: 4ab2813a-6b83-4907-b104-0473465814f5 + + :steps: + 1. Register a RHEL host to Satellite. + 2. Import all roles available by default. + 3. Create a host group and assign one of the Ansible roles to the host group. + 4. Assign the host to the host group. + 5. Assign one roles to the RHEL host. + 6. Create a variable and associate it with the role assigned to the Host. + 7. Create a variable and associate it with the role assigned to the Hostgroup. + 8. Navigate to the new UI for the given Host. + 9. Select the 'Ansible' tab, then the 'Variables' sub-tab. + + :expectedresults: The variables information for the given Host is visible. + """ + + @pytest.mark.stubbed + @pytest.mark.tier2 + def test_positive_assign_role_in_new_ui(self): + """Using the new Host UI, assign a role to a Host + + :id: 044f38b4-cff2-4ddc-b93c-7e9f2826d00d + + :steps: + 1. Register a RHEL host to Satellite. + 2. Import all roles available by default. + 3. Navigate to the new UI for the given Host. + 4. Select the 'Ansible' tab + 5. Click the 'Assign Ansible Roles' button. + 6. Using the popup, assign a role to the Host. + + :expectedresults: The Role is successfully assigned to the Host, and visible on the UI + """ + + @pytest.mark.stubbed + @pytest.mark.tier2 + def test_positive_remove_role_in_new_ui(self): + """Using the new Host UI, remove the role(s) of a Host + + :id: d6de5130-45f6-4349-b490-fbde2aed082c + + :steps: + 1. Register a RHEL host to Satellite. + 2. Import all roles available by default. + 3. Assign a role to the host. + 4. Navigate to the new UI for the given Host. + 5. 
Select the 'Ansible' tab + 6. Click the 'Edit Ansible roles' button. + 7. Using the popup, remove the assigned role from the Host. + + :expectedresults: Role is successfully removed from the Host, and not visible on the UI + """ + + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_ansible_config_report_failed_tasks_errors(self): + """Check that failed Ansible tasks show as errors in the config report + + :id: 1a91e534-143f-4f35-953a-7ad8b7d2ddf3 + + :steps: + 1. Import Ansible roles + 2. Assign Ansible roles to a host + 3. Run Ansible roles on host + + :expectedresults: Verify that any task failures are listed as errors in the config report + + :CaseAutomation: NotAutomated + """ + + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_ansible_config_report_changes_notice(self): + """Check that Ansible tasks that make changes on a host show as notice in the config report + + :id: 8c90f179-8b70-4932-a477-75dc3566c437 + + :steps: + 1. Import Ansible Roles + 2. Assign Ansible roles to a host + 3. Run Ansible Roles on a host + + :expectedresults: Verify that any tasks that make changes on the host + are listed as notice in the config report - 1. Import Ansible roles if none have been imported yet. - 2. Create an Ansible variable with only a name and an assigned Ansible role. - 3. Verify that the Ansible variable has been created. - 4. Delete the Ansible variable. - 5. Verify that the Ansible Variable has been deleted. + :CaseAutomation: NotAutomated + """ - :expectedresults: The variable is successfully created and deleted. - """ - key = gen_string('alpha') - - SELECTED_ROLE = 'redhat.satellite.activation_keys' - proxy_id = target_sat.nailgun_smart_proxy.id - target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) - with target_sat.ui_session() as session: - session.ansiblevariables.create( - { - 'key': key, - 'ansible_role': SELECTED_ROLE, - } - ) - assert session.ansiblevariables.search(key)[0]['Name'] == key - session.ansiblevariables.delete(key) - assert not session.ansiblevariables.search(key) + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_ansible_variables_imported_with_roles(self): + """Verify that, when Ansible roles are imported, their variables are imported simultaneously + :id: 107c53e8-5a8a-4291-bbde-fbd66a0bb85e -def test_positive_create_variable_with_overrides(target_sat): - """Create an Ansible variable with all values populated. + :steps: + 1. Import Ansible roles + 2. Navigate to Configure > Variables - :id: 90acea37-4c2f-42e5-92a6-0c88148f4fb6 + :expectedresults: Verify that any variables in the role were also imported to Satellite - :steps: + :CaseAutomation: NotAutomated + """ - 1. Import Ansible roles if none have been imported yet. - 2. Create an Anible variable, populating all fields on the creation form. - 3. Verify that the Ansible variable was created successfully. + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_ansible_roles_ignore_list(self): + """Verify that the ignore list setting prevents selected roles from being available for import - :expectedresults: The variable is successfully created. 
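For reference, the payload handed to session.ansiblevariables.create_with_overrides in the tests above is a flat dict whose keys map to the form fields; a minimal illustrative shape, with the key names taken from the calls earlier in this class and placeholder values:

override_payload = {
    'key': 'example_variable',  # variable name (placeholder)
    'ansible_role': 'redhat.satellite.activation_keys',
    'override': 'true',
    'parameter_type': 'array',
    'default_value': '["test"]',
    'validator_type': None,
    'attribute_order': 'domain \n fqdn \n hostgroup \n os',
    'merge_default': 'true',
    'merge_overrides': 'true',
    'matcher_section.params': [
        {
            'attribute_type': {'matcher_key': 'os', 'matcher_value': 'fedora'},
            'value': "['13']",
        }
    ],
}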
- """ - key = gen_string('alpha') - - SELECTED_ROLE = 'redhat.satellite.activation_keys' - proxy_id = target_sat.nailgun_smart_proxy.id - target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) - with target_sat.ui_session() as session: - session.ansiblevariables.create_with_overrides( - { - 'key': key, - 'description': 'this is a description', - 'ansible_role': SELECTED_ROLE, - 'parameter_type': 'integer', - 'default_value': '11', - 'validator_type': 'list', - 'validator_rule': '11, 12, 13', - 'attribute_order': 'domain \n fqdn \n hostgroup \n os', - 'matcher_section.params': [ - { - 'attribute_type': {'matcher_key': 'os', 'matcher_value': 'fedora'}, - 'value': '13', - } - ], - } - ) - assert session.ansiblevariables.search(key)[0]['Name'] == key + :id: 6fa1d8f0-b583-4a07-88eb-c9ae7fcd0219 + :steps: + 1. Add roles to the ignore list in Administer > Settings > Ansible + 2. Navigate to Configure > Roles -@pytest.mark.pit_server -@pytest.mark.no_containers -@pytest.mark.rhel_ver_match('[^6]') -def test_positive_config_report_ansible(session, target_sat, module_org, rhel_contenthost): - """Test Config Report generation with Ansible Jobs. + :expectedresults: Verify that any roles on the ignore list are not available for import - :id: 118e25e5-409e-44ba-b908-217da9722576 + :CaseAutomation: NotAutomated + """ - :steps: - 1. Register a content host with satellite - 2. Import a role into satellite - 3. Assign that role to a host - 4. Assert that the role was assigned to the host successfully - 5. Run the Ansible playbook associated with that role - 6. Check if the report is created successfully + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_ansible_variables_installed_with_collection(self): + """Verify that installing an Ansible collection also imports + any variables associated with the collection - :expectedresults: - 1. Host should be assigned the proper role. - 2. Job report should be created. 
+ :id: 7ff88022-fe9b-482f-a6bb-3922036a1e1c - :CaseComponent: Ansible-RemoteExecution - """ - SELECTED_ROLE = 'RedHatInsights.insights-client' - if rhel_contenthost.os_version.major <= 7: - rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) - assert rhel_contenthost.execute('yum install -y insights-client').status == 0 - rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(module_org.label, force=True) - assert rhel_contenthost.subscribed - rhel_contenthost.add_rex_key(satellite=target_sat) - id = target_sat.nailgun_smart_proxy.id - target_host = rhel_contenthost.nailgun_host - target_sat.api.AnsibleRoles().sync(data={'proxy_id': id, 'role_names': [SELECTED_ROLE]}) - target_sat.cli.Host.ansible_roles_assign({'id': target_host.id, 'ansible-roles': SELECTED_ROLE}) - host_roles = target_host.list_ansible_roles() - assert host_roles[0]['name'] == SELECTED_ROLE - template_id = ( - target_sat.api.JobTemplate() - .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] - .id - ) - job = target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'targeting_type': 'static_query', - 'search_query': f'name = {rhel_contenthost.hostname}', - }, - ) - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 - ) - result = target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 1 - with session: - session.location.select(constants.DEFAULT_LOC) - assert session.host.search(target_host.name)[0]['Name'] == rhel_contenthost.hostname - session.configreport.search(rhel_contenthost.hostname) - session.configreport.delete(rhel_contenthost.hostname) - assert len(session.configreport.read()['table']) == 0 - - -@pytest.mark.no_containers -@pytest.mark.rhel_ver_match('9') -def test_positive_ansible_custom_role(target_sat, session, module_org, rhel_contenthost, request): - """ - Test Config report generation with Custom Ansible Role + :steps: + 1. Install an Ansible collection + 2. Navigate to Configure > Variables - :id: 3551068a-ccfc-481c-b7ec-8fe2b8a802bf + :expectedresults: Verify that any variables associated with the collection + are present on Configure > Variables - :customerscenario: true + :CaseAutomation: NotAutomated + """ - :steps: - 1. Register a content host with satellite - 2. Create a custom role and import into satellite - 3. Assign that role to a host - 4. Assert that the role was assigned to the host successfully - 5. Run the Ansible playbook associated with that role - 6. Check if the report is created successfully + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_set_ansible_role_order_per_host(self): + """Verify that role run order can be set and this order is respected when roles are run - :expectedresults: - 1. Config report should be generated for a custom role run. + :id: 24fbcd60-7cd1-46ff-86ac-16d6b436202c - :BZ: 2155392 + :steps: + 1. Enable a host for remote execution + 2. Navigate to Hosts > All Hosts > $hostname > Edit > Ansible Roles + 3. Assign more than one role to the host + 4. Use the drag-and-drop mechanism to change the order of the roles + 5. Run Ansible roles on the host - :CaseComponent: Ansible-RemoteExecution - """ + :expectedresults: The roles are run in the specified order - @request.addfinalizer - def _finalize(): - result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) - assert f'Ansible role [{SELECTED_ROLE}] was deleted.' 
in result[0]['message'] - target_sat.execute('rm -rvf /etc/ansible/roles/custom_role') - - SELECTED_ROLE = 'custom_role' - playbook = f'{robottelo_tmp_dir}/playbook.yml' - data = { - 'name': 'Copy ssh keys', - 'copy': { - 'src': '/var/lib/foreman-proxy/ssh/{{ item }}', - 'dest': '/root/.ssh', - 'owner': 'root', - "group": 'root', - 'mode': '0400', - }, - 'with_items': ['id_rsa_foreman_proxy.pub', 'id_rsa_foreman_proxy'], - } - with open(playbook, 'w') as f: - yaml.dump(data, f, sort_keys=False, default_flow_style=False) - target_sat.execute('mkdir /etc/ansible/roles/custom_role') - target_sat.put(playbook, '/etc/ansible/roles/custom_role/playbook.yaml') - rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(module_org.label, force=True) - assert rhel_contenthost.subscribed - rhel_contenthost.add_rex_key(satellite=target_sat) - proxy_id = target_sat.nailgun_smart_proxy.id - target_host = rhel_contenthost.nailgun_host - target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) - target_sat.cli.Host.ansible_roles_assign({'id': target_host.id, 'ansible-roles': SELECTED_ROLE}) - host_roles = target_host.list_ansible_roles() - assert host_roles[0]['name'] == SELECTED_ROLE - - template_id = ( - target_sat.api.JobTemplate() - .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] - .id - ) - job = target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'targeting_type': 'static_query', - 'search_query': f'name = {rhel_contenthost.hostname}', - }, - ) - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 - ) - result = target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 1 - with session: - session.location.select(constants.DEFAULT_LOC) - assert session.host.search(target_host.name)[0]['Name'] == rhel_contenthost.hostname - session.configreport.search(rhel_contenthost.hostname) - session.configreport.delete(rhel_contenthost.hostname) - assert len(session.configreport.read()['table']) == 0 - - -@pytest.mark.tier2 -def test_positive_host_role_information(target_sat, function_host): - """Assign Ansible Role to a Host and verify that the information - in the new UI is displayed correctly - - :id: 7da913ef-3b43-4bfa-9a45-d895431c8b56 - - :steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Assign one role to the RHEL host. - 4. Navigate to the new UI for the given Host. - 5. Select the 'Ansible' tab, then the 'Inventory' sub-tab. - - :expectedresults: Roles assigned directly to the Host are visible on the subtab. 
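Both the old module-level tests and their class-based replacements prepare the host the same way: the role is synced through the internal smart proxy and then assigned to the host record. A condensed sketch of that preparation, using the same API and CLI calls as the surrounding code (the helper name is invented for this illustration):

def assign_role(target_sat, nailgun_host, role='RedHatInsights.insights-client'):
    # import the role via the internal smart proxy, then attach it to the host
    proxy_id = target_sat.nailgun_smart_proxy.id
    target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [role]})
    target_sat.cli.Host.ansible_roles_assign({'id': nailgun_host.id, 'ansible-roles': role})
    assert nailgun_host.list_ansible_roles()[0]['name'] == role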
- """ - SELECTED_ROLE = 'RedHatInsights.insights-client' - - location = function_host.location.read() - organization = function_host.organization.read() - proxy_id = target_sat.nailgun_smart_proxy.id - target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) - target_sat.cli.Host.ansible_roles_assign( - {'id': function_host.id, 'ansible-roles': SELECTED_ROLE} - ) - host_roles = function_host.list_ansible_roles() - assert host_roles[0]['name'] == SELECTED_ROLE - with target_sat.ui_session() as session: - session.location.select(location.name) - session.organization.select(organization.name) - ansible_roles_table = session.host_new.get_ansible_roles(function_host.name) - assert ansible_roles_table[0]["Name"] == SELECTED_ROLE - all_assigned_roles_table = session.host_new.get_ansible_roles_modal(function_host.name) - assert all_assigned_roles_table[0]["Name"] == SELECTED_ROLE - - -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_positive_role_variable_information(self): - """Create and assign variables to an Ansible Role and verify that the information in - the new UI is displayed correctly - - :id: 4ab2813a-6b83-4907-b104-0473465814f5 - - :steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Create a host group and assign one of the Ansible roles to the host group. - 4. Assign the host to the host group. - 5. Assign one roles to the RHEL host. - 6. Create a variable and associate it with the role assigned to the Host. - 7. Create a variable and associate it with the role assigned to the Hostgroup. - 8. Navigate to the new UI for the given Host. - 9. Select the 'Ansible' tab, then the 'Variables' sub-tab. - - :expectedresults: The variables information for the given Host is visible. - """ + :CaseAutomation: NotAutomated + """ + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_set_ansible_role_order_per_hostgroup(self): + """Verify that role run order can be set and that this order is respected when roles are run -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_positive_assign_role_in_new_ui(self): - """Using the new Host UI, assign a role to a Host + :id: 9eb5bc8e-081a-45b9-8751-f4220c944da6 - :id: 044f38b4-cff2-4ddc-b93c-7e9f2826d00d + :steps: + 1. Enable a host for remote execution + 2. Create a host group + 3. Navigate to Configure > Host Groups > $hostgroup > Ansible Roles + 4. Assign more than one role to the host group + 5. Use the drag-and-drop mechanism to change the order of the roles + 6. Add the host to the host group + 7. Run Ansible roles on the host group - :steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Navigate to the new UI for the given Host. - 4. Select the 'Ansible' tab - 5. Click the 'Assign Ansible Roles' button. - 6. Using the popup, assign a role to the Host. + :expectedresults: The roles are run in the specified order - :expectedresults: The Role is successfully assigned to the Host, and shows up on the UI - """ + :CaseAutomation: NotAutomated + """ + @pytest.mark.tier2 + def test_positive_assign_and_remove_ansible_role_to_host(self, target_sat, function_host): + """Add and remove the role(s) of a Host -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_positive_remove_role_in_new_ui(self): - """Using the new Host UI, remove the role(s) of a Host + :id: a61b4c05-1395-47c2-b6d9-fcff8b094e0e - :id: d6de5130-45f6-4349-b490-fbde2aed082c + :setup: Used pre-defined function_host (component/host) registerd with satellite. 
- :steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Assign a role to the host. - 4. Navigate to the new UI for the given Host. - 5. Select the 'Ansible' tab - 6. Click the 'Edit Ansible roles' button. - 7. Using the popup, remove the assigned role from the Host. + :steps: + 1. Import all roles available by default. + 2. Assign a role to the host. + 3. Navigate to the new UI for the given Host. + 4. Select the 'Ansible' tab + 5. Click the 'Edit Ansible roles' button. + 6. Using the popup, remove the assigned role from the Host. - :expectedresults: The Role is successfully removed from the Host, and no longer shows - up on the UI + :expectedresults: The Role is successfully aaded and removed from the Host, and no longer shows + up on the UI + """ + SELECTED_ROLE = 'RedHatInsights.insights-client' + + location = function_host.location.read() + organization = function_host.organization.read() + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + with target_sat.ui_session() as session: + session.location.select(location.name) + session.organization.select(organization.name) + # add ansible role + session.host_new.add_single_ansible_role(function_host.name) + wait_for(lambda: session.browser.refresh(), timeout=5) + # verify ansible role assigned to new UI for the given Host + ansible_roles_table = session.host_new.get_ansible_roles(function_host.name) + assert ansible_roles_table[0]['Name'] == SELECTED_ROLE + # remove ansible role + session.host_new.remove_single_ansible_role(function_host.name) + # verify ansible role removed + result = session.host_new.get_details( + function_host.name, widget_names='ansible.roles.noRoleAssign' + ) + assert ( + result['ansible']['roles']['noRoleAssign'] + == 'No roles assigned directly to the host' + ) + + +class TestAnsibleREX: + """Test class for remote execution via Ansible + + :CaseComponent: Ansible-RemoteExecution """ + + @pytest.mark.tier2 + @pytest.mark.pit_server + @pytest.mark.no_containers + @pytest.mark.rhel_ver_match('[^6]') + def test_positive_config_report_ansible( + self, target_sat, module_org, module_ak_with_cv, rhel_contenthost + ): + """Test Config Report generation with Ansible Jobs. + + :id: 118e25e5-409e-44ba-b908-217da9722576 + + :steps: + 1. Register a content host with satellite + 2. Import a role into satellite + 3. Assign that role to a host + 4. Assert that the role was assigned to the host successfully + 5. Run the Ansible playbook associated with that role + 6. Check if the report is created successfully + + :expectedresults: + 1. Host should be assigned the proper role. + 2. Job report should be created. 
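Once the role is in place, the report in this test comes from running the stock 'Ansible Roles - Ansible Default' job template against the host and waiting for the invocation to finish; a condensed sketch of that sequence, mirroring the calls in the test body that follows (the wrapper function itself is invented for this illustration):

def run_default_ansible_role_job(target_sat, hostname):
    # resolve the built-in job template and launch it against a single host
    template_id = (
        target_sat.api.JobTemplate()
        .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0]
        .id
    )
    job = target_sat.api.JobInvocation().run(
        synchronous=False,
        data={
            'job_template_id': template_id,
            'targeting_type': 'static_query',
            'search_query': f'name = {hostname}',
        },
    )
    # block until the invocation task completes, then return the result for assertions
    target_sat.wait_for_tasks(
        f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000
    )
    return target_sat.api.JobInvocation(id=job['id']).read()

A caller would then assert that the returned invocation has succeeded == 1, as the tests here do.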
+ """ + SELECTED_ROLE = 'RedHatInsights.insights-client' + if rhel_contenthost.os_version.major <= 7: + rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) + assert rhel_contenthost.execute('yum install -y insights-client').status == 0 + result = rhel_contenthost.register(module_org, None, module_ak_with_cv.name, target_sat) + assert result.status == 0, f'Failed to register host: {result.stderr}' + id = target_sat.nailgun_smart_proxy.id + target_host = rhel_contenthost.nailgun_host + target_sat.api.AnsibleRoles().sync(data={'proxy_id': id, 'role_names': [SELECTED_ROLE]}) + target_sat.cli.Host.ansible_roles_assign( + {'id': target_host.id, 'ansible-roles': SELECTED_ROLE} + ) + host_roles = target_host.list_ansible_roles() + assert host_roles[0]['name'] == SELECTED_ROLE + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id + ) + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {rhel_contenthost.hostname}', + }, + ) + target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 + ) + result = target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 1 + with target_sat.ui_session() as session: + session.organization.select(module_org.name) + session.location.select(constants.DEFAULT_LOC) + assert session.host.search(target_host.name)[0]['Name'] == rhel_contenthost.hostname + session.configreport.search(rhel_contenthost.hostname) + session.configreport.delete(rhel_contenthost.hostname) + assert len(session.configreport.read()['table']) == 0 + + @pytest.mark.no_containers + @pytest.mark.rhel_ver_match('9') + def test_positive_ansible_custom_role( + self, target_sat, module_org, module_ak_with_cv, rhel_contenthost, request + ): + """ + Test Config report generation with Custom Ansible Role + + :id: 3551068a-ccfc-481c-b7ec-8fe2b8a802bf + + :steps: + 1. Register a content host with satellite + 2. Create a custom role and import into satellite + 3. Assign that role to a host + 4. Assert that the role was assigned to the host successfully + 5. Run the Ansible playbook associated with that role + 6. Check if the report is created successfully + + :expectedresults: + 1. Config report should be generated for a custom role run. + + :BZ: 2155392 + + :customerscenario: true + """ + + @request.addfinalizer + def _finalize(): + result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) + assert f'Ansible role [{SELECTED_ROLE}] was deleted.' 
in result[0]['message'] + target_sat.execute('rm -rvf /etc/ansible/roles/custom_role') + + SELECTED_ROLE = 'custom_role' + playbook = f'{robottelo_tmp_dir}/playbook.yml' + data = { + 'name': 'Copy ssh keys', + 'copy': { + 'src': '/var/lib/foreman-proxy/ssh/{{ item }}', + 'dest': '/root/.ssh', + 'owner': 'root', + "group": 'root', + 'mode': '0400', + }, + 'with_items': ['id_rsa_foreman_proxy.pub', 'id_rsa_foreman_proxy'], + } + with open(playbook, 'w') as f: + yaml.dump(data, f, sort_keys=False, default_flow_style=False) + target_sat.execute('mkdir /etc/ansible/roles/custom_role') + target_sat.put(playbook, '/etc/ansible/roles/custom_role/playbook.yaml') + + result = rhel_contenthost.register(module_org, None, module_ak_with_cv.name, target_sat) + assert result.status == 0, f'Failed to register host: {result.stderr}' + proxy_id = target_sat.nailgun_smart_proxy.id + target_host = rhel_contenthost.nailgun_host + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + target_sat.cli.Host.ansible_roles_assign( + {'id': target_host.id, 'ansible-roles': SELECTED_ROLE} + ) + host_roles = target_host.list_ansible_roles() + assert host_roles[0]['name'] == SELECTED_ROLE + + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id + ) + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {rhel_contenthost.hostname}', + }, + ) + target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 + ) + result = target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 1 + with target_sat.ui_session() as session: + session.organization.select(module_org.name) + session.location.select(constants.DEFAULT_LOC) + assert session.host.search(target_host.name)[0]['Name'] == rhel_contenthost.hostname + session.configreport.search(rhel_contenthost.hostname) + session.configreport.delete(rhel_contenthost.hostname) + assert len(session.configreport.read()['table']) == 0 + + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_ansible_job_check_mode(self): + """Run a job on a host with enable_roles_check_mode parameter enabled + + :id: 7aeb7253-e555-4e28-977f-71f16d3c32e2 + + :steps: + 1. Set the value of the ansible_roles_check_mode parameter to true on a host + 2. Associate one or more Ansible roles with the host + 3. Run Ansible roles against the host + + :expectedresults: Verify that the roles were run in check mode + (i.e. no changes were made on the host) + + :CaseAutomation: NotAutomated + """ + + @pytest.mark.stubbed + @pytest.mark.tier3 + def test_positive_install_ansible_collection_via_job_invocation(self): + """Verify that Ansible collections can be installed on hosts via job invocations + + :id: d4096aef-f6fc-41b6-ae56-d19b1f49cd42 + + :steps: + 1. Enable a host for remote execution + 2. Navigate to Hosts > Schedule Remote Job + 3. Select "Ansible Galaxy" as the job category + 4. Select "Ansible Collection - Install from Galaxy" as the job template + 5. Enter a collection in the ansible_collections_list field + 6. 
Click "Submit" + + :expectedresults: The Ansible collection is successfully installed on the host + + :CaseAutomation: NotAutomated + """ + + @pytest.mark.stubbed + @pytest.mark.tier2 + def test_positive_schedule_recurring_host_job(self): + """Using the new Host UI, schedule a recurring job on a Host + + :id: 5052be04-28ab-4349-8bee-851ef76e4ffa + + :steps: + 1. Register a RHEL host to Satellite. + 2. Import all roles available by default. + 3. Assign a role to host. + 4. Navigate to the new UI for the given Host. + 5. Select the Jobs subtab. + 6. Click the Schedule Recurring Job button, and using the popup, schedule a + recurring Job. + 7. Navigate to Job Invocations. + + :expectedresults: Scheduled Job appears in the Job Invocation list at the appointed time + """ + + @pytest.mark.stubbed + @pytest.mark.tier2 + def test_positive_schedule_recurring_hostgroup_job(self): + """Using the new recurring job scheduler, schedule a recurring job on a Hostgroup + + :id: c65db99b-11fe-4a32-89d0-0a4692b07efe + + :steps: + 1. Register a RHEL host to Satellite. + 2. Import all roles available by default. + 3. Assign a role to host. + 4. Navigate to the Host Group page. + 5. Select the "Configure Ansible Job" action. + 6. Click the Schedule Recurring Job button, and using the popup, schedule a + recurring Job. + 7. Navigate to Job Invocations. + + :expectedresults: Scheduled Job appears in the Job Invocation list at the appointed time + """ diff --git a/tests/foreman/ui/test_architecture.py b/tests/foreman/ui/test_architecture.py index 045d7f06cc6..151f860d9b1 100644 --- a/tests/foreman/ui/test_architecture.py +++ b/tests/foreman/ui/test_architecture.py @@ -11,6 +11,7 @@ :CaseImportance: Low """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_audit.py b/tests/foreman/ui/test_audit.py index 5f6b87780d5..a5bc2b0dd0e 100644 --- a/tests/foreman/ui/test_audit.py +++ b/tests/foreman/ui/test_audit.py @@ -9,6 +9,7 @@ :Team: Endeavour """ + from fauxfactory import gen_string from nailgun import entities import pytest @@ -182,6 +183,6 @@ def test_positive_add_event(session, module_org): assert values['resource_type'] == 'KATELLO/CONTENT VIEW ENVIRONMENT' assert values['resource_name'] == f'{ENVIRONMENT}/{cv.name} / {cv.name}' assert len(values['action_summary']) == 1 - assert values['action_summary'][0]['column0'] == 'Added {}/{} to {}'.format( - ENVIRONMENT, cv.name, cv.name + assert ( + values['action_summary'][0]['column0'] == f'Added {ENVIRONMENT}/{cv.name} to {cv.name}' ) diff --git a/tests/foreman/ui/test_bookmarks.py b/tests/foreman/ui/test_bookmarks.py index 563a19102a7..0670563ab66 100644 --- a/tests/foreman/ui/test_bookmarks.py +++ b/tests/foreman/ui/test_bookmarks.py @@ -11,16 +11,19 @@ :CaseImportance: High """ -from airgun.exceptions import NoSuchElementException + +from airgun.exceptions import DisabledWidgetError, NoSuchElementException from fauxfactory import gen_string import pytest from robottelo.config import user_nailgun_config -from robottelo.constants import BOOKMARK_ENTITIES +from robottelo.constants import BOOKMARK_ENTITIES_SELECTION @pytest.fixture( - scope='module', params=BOOKMARK_ENTITIES, ids=(i['name'] for i in BOOKMARK_ENTITIES) + scope='module', + params=BOOKMARK_ENTITIES_SELECTION, + ids=(i['name'] for i in BOOKMARK_ENTITIES_SELECTION), ) def ui_entity(module_org, module_location, request): """Collects the list of all applicable UI entities for testing and does all @@ -256,8 +259,14 @@ def 
test_negative_create_with_duplicate_name(session, ui_entity, module_target_s existing_bookmark = session.bookmark.search(bookmark.name)[0] assert existing_bookmark['Name'] == bookmark.name ui_lib = getattr(session, ui_entity['name'].lower()) - # this fails but does not raise UI error, BZ#1992652 closed wontfix - ui_lib.create_bookmark({'name': bookmark.name, 'query': query, 'public': True}) + # this fails but does not raise UI error in old style dialog, BZ#1992652 closed + # wontfix, but new style dialog raises error, both situations occur + old_ui = ui_entity.get('old_ui') + if old_ui: + ui_lib.create_bookmark({'name': bookmark.name, 'query': query, 'public': True}) + else: + with pytest.raises((DisabledWidgetError, NoSuchElementException)): + ui_lib.create_bookmark({'name': bookmark.name, 'query': query, 'public': True}) # assert there are no duplicate bookmarks new_search = session.bookmark.search(bookmark.name) assert len(new_search) == 1 diff --git a/tests/foreman/ui/test_branding.py b/tests/foreman/ui/test_branding.py index 8fe30b1c2ef..0929285af18 100644 --- a/tests/foreman/ui/test_branding.py +++ b/tests/foreman/ui/test_branding.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest diff --git a/tests/foreman/ui/test_computeprofiles.py b/tests/foreman/ui/test_computeprofiles.py index 31075e7c1fb..9c15f1e42ae 100644 --- a/tests/foreman/ui/test_computeprofiles.py +++ b/tests/foreman/ui/test_computeprofiles.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest @@ -36,8 +37,8 @@ def test_positive_end_to_end(session, module_location, module_org): session.computeprofile.create({'name': name}) assert entities.ComputeProfile().search(query={'search': f'name={name}'}), ( - 'Compute profile {} expected to exist, but is not included in the search ' - 'results'.format(name) + f'Compute profile {name} expected to exist, but is not included in the search ' + 'results' ) compute_resource_list = session.computeprofile.list_resources(name) assert f'{compute_resource.name} (Libvirt)' in [ @@ -45,11 +46,11 @@ def test_positive_end_to_end(session, module_location, module_org): ] session.computeprofile.rename(name, {'name': new_name}) assert entities.ComputeProfile().search(query={'search': f'name={new_name}'}), ( - 'Compute profile {} expected to exist, but is not included in the search ' - 'results'.format(new_name) + f'Compute profile {new_name} expected to exist, but is not included in the search ' + 'results' ) session.computeprofile.delete(new_name) assert not entities.ComputeProfile().search(query={'search': f'name={new_name}'}), ( - 'Compute profile {} expected to be deleted, but is included in the search ' - 'results'.format(new_name) + f'Compute profile {new_name} expected to be deleted, but is included in the search ' + 'results' ) diff --git a/tests/foreman/ui/test_computeresource.py b/tests/foreman/ui/test_computeresource.py index 7bd9f2552ce..b9823f339a9 100644 --- a/tests/foreman/ui/test_computeresource.py +++ b/tests/foreman/ui/test_computeresource.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from nailgun import entities import pytest from wait_for import wait_for @@ -283,7 +284,6 @@ def test_positive_VM_import(session, module_org, module_location, rhev_data): name = gen_string('alpha') with session: - session.computeresource.create( { 'name': name, diff --git a/tests/foreman/ui/test_computeresource_azurerm.py b/tests/foreman/ui/test_computeresource_azurerm.py index 36228513ffa..5d09ad88a69 100644 --- 
a/tests/foreman/ui/test_computeresource_azurerm.py +++ b/tests/foreman/ui/test_computeresource_azurerm.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/ui/test_computeresource_ec2.py b/tests/foreman/ui/test_computeresource_ec2.py index 8f575bd4a80..ee554fb925f 100644 --- a/tests/foreman/ui/test_computeresource_ec2.py +++ b/tests/foreman/ui/test_computeresource_ec2.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_computeresource_gce.py b/tests/foreman/ui/test_computeresource_gce.py index a25edb1da44..4c48e902809 100644 --- a/tests/foreman/ui/test_computeresource_gce.py +++ b/tests/foreman/ui/test_computeresource_gce.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import json import random diff --git a/tests/foreman/ui/test_computeresource_libvirt.py b/tests/foreman/ui/test_computeresource_libvirt.py index 42477b8d46a..8328f1a4cf1 100644 --- a/tests/foreman/ui/test_computeresource_libvirt.py +++ b/tests/foreman/ui/test_computeresource_libvirt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import choice from fauxfactory import gen_string diff --git a/tests/foreman/ui/test_computeresource_vmware.py b/tests/foreman/ui/test_computeresource_vmware.py index 833d8100e05..0549e07f65a 100644 --- a/tests/foreman/ui/test_computeresource_vmware.py +++ b/tests/foreman/ui/test_computeresource_vmware.py @@ -11,13 +11,14 @@ :CaseImportance: High """ + from math import floor, log10 from random import choice from nailgun import entities import pytest from wait_for import TimedOutError, wait_for -from wrapanapi.systems.virtualcenter import VMWareSystem, vim +from wrapanapi.systems.virtualcenter import vim from robottelo.config import settings from robottelo.constants import ( @@ -27,6 +28,7 @@ VMWARE_CONSTANTS, ) from robottelo.utils.datafactory import gen_string +from robottelo.utils.issue_handlers import is_open pytestmark = [pytest.mark.skip_if_not_set('vmware')] @@ -56,20 +58,18 @@ def _get_normalized_size(size): return f'{size} {suffixes[suffix_index]}' -def _get_vmware_datastore_summary_string(data_store_name=settings.vmware.datastore, vmware=None): +@pytest.fixture +def get_vmware_datastore_summary_string(vmware, vmwareclient): """Return the datastore string summary for data_store_name For "Local-Ironforge" datastore the string looks Like: "Local-Ironforge (free: 1.66 TB, prov: 2.29 TB, total: 2.72 TB)" """ - system = VMWareSystem( - hostname=vmware.hostname, - username=settings.vmware.username, - password=settings.vmware.password, - ) data_store_summary = [ - h for h in system.get_obj_list(vim.Datastore) if h.host and h.name == data_store_name + h + for h in vmwareclient.get_obj_list(vim.Datastore) + if h.host and h.name == settings.vmware.datastore ][0].summary uncommitted = data_store_summary.uncommitted or 0 capacity = _get_normalized_size(data_store_summary.capacity) @@ -77,7 +77,7 @@ def _get_vmware_datastore_summary_string(data_store_name=settings.vmware.datasto prov = _get_normalized_size( data_store_summary.capacity + uncommitted - data_store_summary.freeSpace ) - return f'{data_store_name} (free: {free_space}, prov: {prov}, total: {capacity})' + return f'{settings.vmware.datastore} (free: {free_space}, prov: {prov}, total: {capacity})' @pytest.mark.tier1 @@ -290,32 +290,65 @@ def test_positive_resource_vm_power_management(session, vmware): raise AssertionError('Timed out waiting for VM to 
toggle power state') from err +@pytest.mark.e2e +@pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) -def test_positive_select_vmware_custom_profile_guest_os_rhel7(session, vmware): - """Select custom default (3-Large) compute profile guest OS RHEL7. +def test_positive_vmware_custom_profile_end_to_end( + session, vmware, request, target_sat, get_vmware_datastore_summary_string +): + """Perform end to end testing for VMware compute profile. :id: 24f7bb5f-2aaf-48cb-9a56-d2d0713dfe3d :customerscenario: true - :setup: vmware hostname and credentials. - :steps: 1. Create a compute resource of type vmware. - 2. Provide valid hostname, username and password. - 3. Select the created vmware CR. - 4. Click Compute Profile tab. - 5. Select 3-Large profile - 6. Set Guest OS field to RHEL7 OS. + 2. Update a compute profile with all values - :expectedresults: Guest OS RHEL7 is selected successfully. + :expectedresults: Compute profiles are updated successfully with all the values. - :BZ: 1315277 + :BZ: 1315277, 2266672 """ cr_name = gen_string('alpha') - guest_os_name = 'Red Hat Enterprise Linux 7 (64-bit)' + guest_os_names = [ + 'Red Hat Enterprise Linux 7 (64-bit)', + 'Red Hat Enterprise Linux 8 (64 bit)', + 'Red Hat Enterprise Linux 9 (64 bit)', + ] + compute_profile = ['1-Small', '2-Medium', '3-Large'] + cpus = ['2', '4', '6'] + vm_memory = ['4000', '6000', '8000'] + annotation_notes = gen_string('alpha') + firmware_type = ['Automatic', 'BIOS', 'EFI'] + resource_pool = VMWARE_CONSTANTS['pool'] + folder = VMWARE_CONSTANTS['folder'] + virtual_hw_version = VMWARE_CONSTANTS['virtualhw_version'] + memory_hot_add = True + cpu_hot_add = True + cdrom_drive = True + disk_size = '10 GB' + network = 'VLAN 1001' # hardcoding network here as this test won't be doing actual provisioning + storage_data = { + 'storage': { + 'controller': VMWARE_CONSTANTS['scsicontroller'], + 'disks': [ + { + 'data_store': get_vmware_datastore_summary_string, + 'size': disk_size, + 'thin_provision': True, + } + ], + } + } + network_data = { + 'network_interfaces': { + 'nic_type': VMWARE_CONSTANTS['network_interface_name'], + 'network': network, + } + } with session: session.computeresource.create( { @@ -327,143 +360,65 @@ def test_positive_select_vmware_custom_profile_guest_os_rhel7(session, vmware): 'provider_content.datacenter.value': settings.vmware.datacenter, } ) - assert session.computeresource.search(cr_name)[0]['Name'] == cr_name - session.computeresource.update_computeprofile( - cr_name, COMPUTE_PROFILE_LARGE, {'provider_content.guest_os': guest_os_name} - ) - values = session.computeresource.read_computeprofile(cr_name, COMPUTE_PROFILE_LARGE) - assert values['provider_content']['guest_os'] == guest_os_name - - -@pytest.mark.tier2 -@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) -def test_positive_access_vmware_with_custom_profile(session, vmware): - """Associate custom default (3-Large) compute profile - - :id: 751ef765-5091-4322-a0d9-0c9c73009cc4 - - :setup: vmware hostname and credentials. - :steps: - - 1. Create a compute resource of type vmware. - 2. Provide valid hostname, username and password. - 3. Select the created vmware CR. - 4. Click Compute Profile tab. - 5. Edit (3-Large) with valid configurations and submit. 
+ @request.addfinalizer + def _finalize(): + cr = target_sat.api.VMWareComputeResource().search(query={'search': f'name={cr_name}'}) + if cr: + target_sat.api.VMWareComputeResource(id=cr[0].id).delete() - :expectedresults: The Compute Resource created and associated to compute profile (3-Large) - with provided values. - """ - cr_name = gen_string('alpha') - data_store_summary_string = _get_vmware_datastore_summary_string(vmware=vmware) - cr_profile_data = dict( - cpus='2', - cores_per_socket='2', - memory='1024', - firmware='EFI', - cluster=settings.vmware.cluster, - resource_pool=VMWARE_CONSTANTS.get('pool'), - folder=VMWARE_CONSTANTS.get('folder'), - guest_os=VMWARE_CONSTANTS.get('guest_os'), - virtual_hw_version=VMWARE_CONSTANTS.get('virtualhw_version'), - memory_hot_add=True, - cpu_hot_add=True, - cdrom_drive=True, - annotation_notes=gen_string('alpha'), - network_interfaces=[] - if not settings.provisioning.vlan_id - else [ - dict( - nic_type=VMWARE_CONSTANTS.get('network_interface_name'), - network='VLAN 1001', # hardcoding network here as these test won't be doing actual provisioning - ), - dict( - nic_type=VMWARE_CONSTANTS.get('network_interface_name'), - network='VLAN 1001', - ), - ], - storage=[ - dict( - controller=VMWARE_CONSTANTS.get('scsicontroller'), - disks=[ - dict( - data_store=data_store_summary_string, - size='10 GB', - thin_provision=True, - ), - dict( - data_store=data_store_summary_string, - size='20 GB', - thin_provision=False, - eager_zero=False, - ), - ], - ), - dict( - controller=VMWARE_CONSTANTS.get('scsicontroller'), - disks=[ - dict( - data_store=data_store_summary_string, - size='30 GB', - thin_provision=False, - eager_zero=True, - ) - ], - ), - ], - ) - with session: - session.computeresource.create( - { - 'name': cr_name, - 'provider': FOREMAN_PROVIDERS['vmware'], - 'provider_content.vcenter': vmware.hostname, - 'provider_content.user': settings.vmware.username, - 'provider_content.password': settings.vmware.password, - 'provider_content.datacenter.value': settings.vmware.datacenter, - } - ) assert session.computeresource.search(cr_name)[0]['Name'] == cr_name - session.computeresource.update_computeprofile( - cr_name, - COMPUTE_PROFILE_LARGE, - {f'provider_content.{key}': value for key, value in cr_profile_data.items()}, - ) - values = session.computeresource.read_computeprofile(cr_name, COMPUTE_PROFILE_LARGE) - provider_content = values['provider_content'] - # assert main compute resource profile data updated successfully. - excluded_keys = ['network_interfaces', 'storage'] - expected_value = { - key: value for key, value in cr_profile_data.items() if key not in excluded_keys - } - provided_value = { - key: value for key, value in provider_content.items() if key in expected_value - } - assert provided_value == expected_value - # assert compute resource profile network data updated successfully. - for network_index, expected_network_value in enumerate( - cr_profile_data['network_interfaces'] + for guest_os_name, cprofile, cpu, memory, firmware in zip( + guest_os_names, compute_profile, cpus, vm_memory, firmware_type, strict=True ): - provided_network_value = { - key: value - for key, value in provider_content['network_interfaces'][network_index].items() - if key in expected_network_value - } - assert provided_network_value == expected_network_value - # assert compute resource profile storage data updated successfully. 
- for controller_index, expected_controller_value in enumerate(cr_profile_data['storage']): - provided_controller_value = provider_content['storage'][controller_index] + session.computeresource.update_computeprofile( + cr_name, + cprofile, + { + 'provider_content.guest_os': guest_os_name, + 'provider_content.cpus': cpu, + 'provider_content.memory': memory, + 'provider_content.cluster': settings.vmware.cluster, + 'provider_content.annotation_notes': annotation_notes, + 'provider_content.virtual_hw_version': virtual_hw_version, + 'provider_content.firmware': firmware, + 'provider_content.resource_pool': resource_pool, + 'provider_content.folder': folder, + 'provider_content.memory_hot_add': memory_hot_add, + 'provider_content.cpu_hot_add': cpu_hot_add, + 'provider_content.cdrom_drive': cdrom_drive, + 'provider_content.storage': [value for value in storage_data.values()], + 'provider_content.network_interfaces': [ + value for value in network_data.values() + ], + }, + ) + values = session.computeresource.read_computeprofile(cr_name, cprofile) + provider_content = values['provider_content'] + assert provider_content['guest_os'] == guest_os_name + assert provider_content['cpus'] == cpu + assert provider_content['memory'] == memory + assert provider_content['cluster'] == settings.vmware.cluster + assert provider_content['annotation_notes'] == annotation_notes + assert provider_content['virtual_hw_version'] == virtual_hw_version + if not is_open('BZ:2266672'): + assert values['provider_content']['firmware'] == firmware + assert provider_content['resource_pool'] == resource_pool + assert provider_content['folder'] == folder + assert provider_content['memory_hot_add'] == memory_hot_add + assert provider_content['cpu_hot_add'] == cpu_hot_add + assert provider_content['cdrom_drive'] == cdrom_drive assert ( - provided_controller_value['controller'] == expected_controller_value['controller'] + provider_content['storage'][0]['controller'] == VMWARE_CONSTANTS['scsicontroller'] ) - for disk_index, expected_disk_value in enumerate(expected_controller_value['disks']): - provided_disk_value = { - key: value - for key, value in provided_controller_value['disks'][disk_index].items() - if key in expected_disk_value - } - assert provided_disk_value == expected_disk_value + assert provider_content['storage'][0]['disks'][0]['size'] == disk_size + assert ( + provider_content['network_interfaces'][0]['nic_type'] + == VMWARE_CONSTANTS['network_interface_name'] + ) + assert provider_content['network_interfaces'][0]['network'] == network + session.computeresource.delete(cr_name) + assert not session.computeresource.search(cr_name) @pytest.mark.tier2 diff --git a/tests/foreman/ui/test_config_group.py b/tests/foreman/ui/test_config_group.py index b38384d2977..34e6b32b9c9 100644 --- a/tests/foreman/ui/test_config_group.py +++ b/tests/foreman/ui/test_config_group.py @@ -11,6 +11,7 @@ :CaseImportance: Low """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/ui/test_containerimagetag.py b/tests/foreman/ui/test_containerimagetag.py index b46cc15bc4e..c8c7188e683 100644 --- a/tests/foreman/ui/test_containerimagetag.py +++ b/tests/foreman/ui/test_containerimagetag.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_contentcredentials.py b/tests/foreman/ui/test_contentcredentials.py index 4fa3f519986..42be72d12ec 100644 --- a/tests/foreman/ui/test_contentcredentials.py +++ b/tests/foreman/ui/test_contentcredentials.py @@ -11,6 
+11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/ui/test_contenthost.py b/tests/foreman/ui/test_contenthost.py index f0c33476198..57260382b77 100644 --- a/tests/foreman/ui/test_contenthost.py +++ b/tests/foreman/ui/test_contenthost.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import datetime, timedelta import re from urllib.parse import urlparse @@ -1717,7 +1718,7 @@ def test_pagination_multiple_hosts_multiple_pages(session, module_host_template, f'os = {module_host_template.operatingsystem.name}' ) # Assert dump of fake hosts found includes the higest numbered host created for this test - match = re.search(fr'test-{host_num:0>2}', str(all_fake_hosts_found)) + match = re.search(rf'test-{host_num:0>2}', str(all_fake_hosts_found)) assert match, 'Highest numbered host not found.' # Get all the pagination values pagination_values = session.contenthost.read_all('Pagination')['Pagination'] @@ -1755,8 +1756,8 @@ def test_search_for_virt_who_hypervisors(session, default_location, module_targe hypervisor_display_name = f'virt-who-{hypervisor_name}-{org.id}' # Search with hypervisor=True gives the correct result. assert ( - session.contenthost.search('hypervisor = true')[0]['Name'] - ) == hypervisor_display_name + (session.contenthost.search('hypervisor = true')[0]['Name']) == hypervisor_display_name + ) # Search with hypervisor=false gives the correct result. content_hosts = [host['Name'] for host in session.contenthost.search('hypervisor = false')] assert hypervisor_display_name not in content_hosts diff --git a/tests/foreman/ui/test_contentview.py b/tests/foreman/ui/test_contentview.py index 356c17e3fe0..f829546f1af 100644 --- a/tests/foreman/ui/test_contentview.py +++ b/tests/foreman/ui/test_contentview.py @@ -14,6 +14,7 @@ :CaseImportance: High """ + import datetime from random import randint diff --git a/tests/foreman/ui/test_dashboard.py b/tests/foreman/ui/test_dashboard.py index 9be16d4e6a9..ae5094daeeb 100644 --- a/tests/foreman/ui/test_dashboard.py +++ b/tests/foreman/ui/test_dashboard.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from nailgun.entity_mixins import TaskFailedError import pytest @@ -52,14 +53,14 @@ def test_positive_host_configuration_status(session, target_sat): 'Hosts with no reports', ] search_strings_list = [ - 'last_report > \"30 minutes ago\" and (status.applied > 0 or' + 'last_report > "30 minutes ago" and (status.applied > 0 or' ' status.restarted > 0) and (status.failed = 0)', - 'last_report > \"30 minutes ago\" and (status.failed > 0 or' + 'last_report > "30 minutes ago" and (status.failed > 0 or' ' status.failed_restarts > 0) and status.enabled = true', - 'last_report > \"30 minutes ago\" and status.enabled = true and' + 'last_report > "30 minutes ago" and status.enabled = true and' ' status.applied = 0 and status.failed = 0 and status.pending = 0', - 'last_report > \"30 minutes ago\" and status.pending > 0 and status.enabled = true', - 'last_report < \"30 minutes ago\" and status.enabled = true', + 'last_report > "30 minutes ago" and status.pending > 0 and status.enabled = true', + 'last_report < "30 minutes ago" and status.enabled = true', 'status.enabled = false', 'not has last_report and status.enabled = true', ] @@ -159,9 +160,7 @@ def test_positive_task_status(session, target_sat): tasks = session.task.read_all() total_items = session.task.total_items() assert total_items == int(tasks['StoppedChart']['table'][1]['Total']) - task_name = "Synchronize repository '{}'; 
product '{}'; organization '{}'".format(
-            repo.name, product.name, org.name
-        )
+        task_name = f"Synchronize repository '{repo.name}'; product '{product.name}'; organization '{org.name}'"
         assert tasks['table'][0]['Action'] == task_name
         assert tasks['table'][0]['State'] == 'stopped'
         assert tasks['table'][0]['Result'] == 'warning'
diff --git a/tests/foreman/ui/test_discoveredhost.py b/tests/foreman/ui/test_discoveredhost.py
index 64476188f89..82fbdf5ca9d 100644
--- a/tests/foreman/ui/test_discoveredhost.py
+++ b/tests/foreman/ui/test_discoveredhost.py
@@ -9,6 +9,7 @@
 :Team: Rocket

 """
+
 from fauxfactory import gen_string
 import pytest
 from wait_for import wait_for
diff --git a/tests/foreman/ui/test_discoveryrule.py b/tests/foreman/ui/test_discoveryrule.py
index 83ab23cbcc2..85958d57e53 100644
--- a/tests/foreman/ui/test_discoveryrule.py
+++ b/tests/foreman/ui/test_discoveryrule.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_integer, gen_ipaddr, gen_string
 import pytest
diff --git a/tests/foreman/ui/test_domain.py b/tests/foreman/ui/test_domain.py
index 09e4864f07f..71314a0a67a 100644
--- a/tests/foreman/ui/test_domain.py
+++ b/tests/foreman/ui/test_domain.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_string
 from nailgun import entities
 import pytest
diff --git a/tests/foreman/ui/test_eol_banner.py b/tests/foreman/ui/test_eol_banner.py
new file mode 100644
index 00000000000..cc3c46eadf0
--- /dev/null
+++ b/tests/foreman/ui/test_eol_banner.py
@@ -0,0 +1,100 @@
+"""Test module for Dashboard UI
+
+:Requirement: Dashboard
+
+:CaseAutomation: Automated
+
+:CaseComponent: Dashboard
+
+:Team: Endeavour
+
+:CaseImportance: High
+
+"""
+
+from datetime import datetime, timedelta
+
+from airgun.session import Session
+from navmazing import NavigationTriesExceeded
+import pytest
+
+from robottelo.utils.datafactory import gen_string
+
+
+def set_eol_date(target_sat, eol_date):
+    target_sat.execute(
+        rf'''sed -i "/end_of_life/c\ 'end_of_life': '{eol_date}'" /usr/share/satellite/lifecycle-metadata.yml'''
+    )
+    target_sat.restart_services()
+
+
+@pytest.mark.upgrade
+@pytest.mark.run_in_one_thread
+@pytest.mark.tier2
+def test_positive_eol_banner_e2e(session, target_sat, test_name):
+    """Check if the EOL banner is displayed correctly
+
+    :id: 0ce6c11c-d969-4e7e-a934-cd1683de62a3
+
+    :steps:
+
+        1. Set the EOL date within 6 months, assert warning banner
+        2. Check non-admin users can't see warning banner
+        3. Dismiss banner
+        4. Move EOL date to the past, assert error banner
+        5. Check non-admin users can't see error banner
+        6.
Dismiss banner + + :expectedresults: Banner shows up when it should + """ + # non-admin user + username = gen_string('alpha') + password = gen_string('alpha') + target_sat.api.User(login=username, password=password, mail='test@example.com').create() + # admin user + admin_username = gen_string('alpha') + admin_password = gen_string('alpha') + target_sat.api.User( + login=admin_username, password=admin_password, admin=True, mail='admin@example.com' + ).create() + + # eol in 3 months + eol_date = (datetime.now() + timedelta(days=90)).strftime("%Y-%m-%d") + message_date = (datetime.now() + timedelta(days=90)).strftime("%B %Y") + set_eol_date(target_sat, eol_date) + + # non-admin can't see banner + with Session(test_name, username, password) as newsession: + with pytest.raises(NavigationTriesExceeded) as error: + newsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' + + # admin can see warning banner + with Session(test_name, admin_username, admin_password) as adminsession: + banner = adminsession.eol_banner.read() + assert message_date in banner["name"] + assert adminsession.eol_banner.is_warning() + adminsession.eol_banner.dismiss() + with pytest.raises(NavigationTriesExceeded) as error: + adminsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' + + # past eol_date + eol_date = (datetime.now() - timedelta(days=5)).strftime("%Y-%m-%d") + set_eol_date(target_sat, eol_date) + + # non-admin can't see danger banner + with Session(test_name, username, password) as newsession: + with pytest.raises(NavigationTriesExceeded) as error: + newsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' + + # admin can see danger banner + with Session(test_name, admin_username, admin_password) as adminsession: + banner = adminsession.eol_banner.read() + assert eol_date in banner["name"] + assert adminsession.eol_banner.is_danger() + adminsession.eol_banner.dismiss() + with pytest.raises(NavigationTriesExceeded) as error: + adminsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' diff --git a/tests/foreman/ui/test_errata.py b/tests/foreman/ui/test_errata.py index cbbd91d71b1..05a947e4bbe 100644 --- a/tests/foreman/ui/test_errata.py +++ b/tests/foreman/ui/test_errata.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from broker import Broker from fauxfactory import gen_string from manifester import Manifester diff --git a/tests/foreman/ui/test_hardwaremodel.py b/tests/foreman/ui/test_hardwaremodel.py index 38b42de6aac..82a97c6db9f 100644 --- a/tests/foreman/ui/test_hardwaremodel.py +++ b/tests/foreman/ui/test_hardwaremodel.py @@ -9,6 +9,7 @@ :Team: Endeavour """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_host.py b/tests/foreman/ui/test_host.py index e3de3c10dd1..4197d1fa18d 100644 --- a/tests/foreman/ui/test_host.py +++ b/tests/foreman/ui/test_host.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import copy import csv import os @@ -1043,43 +1044,6 @@ def test_positive_read_details_page_from_new_ui(session, host_ui_options): assert values['overview']['details']['details']['comment'] == 'Host with fake data' -@pytest.mark.tier4 -@pytest.mark.rhel_ver_match('8') -def test_rex_new_ui(session, target_sat, rex_contenthost): - """Run remote execution using the new host details page - - :id: ee625595-4995-43b2-9e6d-633c9b33ff93 - - :steps: - 1. Navigate to Overview tab - 2. Schedule a job - 3. Wait for the job to finish - 4. 
Job is visible in Recent jobs card - - :expectedresults: Remote execution succeeded and the job is visible on Recent jobs card on - Overview tab - """ - hostname = rex_contenthost.hostname - job_args = { - 'job_category': 'Commands', - 'job_template': 'Run Command - Script Default', - 'template_content.command': 'ls', - } - with session: - session.location.select(loc_name=DEFAULT_LOC) - session.host_new.schedule_job(hostname, job_args) - task_result = target_sat.wait_for_tasks( - search_query=(f'Remote action: Run ls on {hostname}'), - search_rate=2, - max_tries=30, - ) - task_status = target_sat.api.ForemanTask(id=task_result[0].id).poll() - assert task_status['result'] == 'success' - recent_jobs = session.host_new.get_details(hostname, "overview.recent_jobs")['overview'] - assert recent_jobs['recent_jobs']['finished']['table'][0]['column0'] == "Run ls" - assert recent_jobs['recent_jobs']['finished']['table'][0]['column2'] == "succeeded" - - @pytest.mark.tier4 def test_positive_manage_table_columns(session, current_sat_org, current_sat_location): """Set custom columns of the hosts table. @@ -1115,8 +1079,8 @@ def test_positive_manage_table_columns(session, current_sat_org, current_sat_loc 'Recommendations': False, } with session: - session.organization.select(org_name=current_sat_org) - session.location.select(loc_name=current_sat_location) + session.organization.select(org_name=current_sat_org.name) + session.location.select(loc_name=current_sat_location.name) session.host.manage_table_columns(columns) displayed_columns = session.host.get_displayed_table_headers() for column, is_displayed in columns.items(): @@ -1147,8 +1111,8 @@ def test_positive_host_details_read_templates( host = target_sat.api.Host().search(query={'search': f'name={target_sat.hostname}'})[0] api_templates = [template['name'] for template in host.list_provisioning_templates()] with session: - session.organization.select(org_name=current_sat_org) - session.location.select(loc_name=current_sat_location) + session.organization.select(org_name=current_sat_org.name) + session.location.select(loc_name=current_sat_location.name) host_detail = session.host_new.get_details(target_sat.hostname, widget_names='details') ui_templates = [ row['column1'].strip() @@ -1928,3 +1892,29 @@ def test_change_content_source(session, change_content_source_prep, rhel_content rhel_contenthost_post_values['lifecycle_environment']['name'] == rhel_contenthost_post_values['lifecycle_environment']['name'] ) + + +@pytest.mark.tier3 +@pytest.mark.rhel_ver_match('8') +def test_positive_page_redirect_after_update(target_sat, current_sat_location): + """Check that page redirects correctly after editing a host without making any changes. + + :id: 29c3397e-0010-11ef-bca4-000c2989e153 + + :steps: + 1. Go to All Hosts page. + 2. Edit a host. Using the Sat. host is sufficient, no other host needs to be created or registered, + because we need just a host with FQDN. + 3. Submit the host edit dialog without making any changes. + + :expectedresults: The page should be redirected to the host details page. 
+ + :BZ: 2166303 + """ + client = target_sat + with target_sat.ui_session() as session: + session.location.select(loc_name=current_sat_location.name) + session.host_new.update(client.hostname, {}) + + assert 'page-not-found' not in session.browser.url + assert client.hostname in session.browser.url diff --git a/tests/foreman/ui/test_hostcollection.py b/tests/foreman/ui/test_hostcollection.py index 06af17c4a5a..67d4b434ee7 100644 --- a/tests/foreman/ui/test_hostcollection.py +++ b/tests/foreman/ui/test_hostcollection.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import time from broker import Broker @@ -759,8 +760,8 @@ def test_positive_install_modular_errata( """ stream = "0" version = "20180704111719" - _module_install_command = 'dnf -y module install {}:{}:{}'.format( - constants.FAKE_4_CUSTOM_PACKAGE_NAME, stream, version + _module_install_command = ( + f'dnf -y module install {constants.FAKE_4_CUSTOM_PACKAGE_NAME}:{stream}:{version}' ) _run_remote_command_on_content_hosts(_module_install_command, vm_content_hosts_module_stream) _run_remote_command_on_content_hosts('dnf -y upload-profile', vm_content_hosts_module_stream) diff --git a/tests/foreman/ui/test_hostgroup.py b/tests/foreman/ui/test_hostgroup.py index 718b5091fca..74304b50ec0 100644 --- a/tests/foreman/ui/test_hostgroup.py +++ b/tests/foreman/ui/test_hostgroup.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_http_proxy.py b/tests/foreman/ui/test_http_proxy.py index 9e3f1e5b10b..625b4c6af3a 100644 --- a/tests/foreman/ui/test_http_proxy.py +++ b/tests/foreman/ui/test_http_proxy.py @@ -11,11 +11,24 @@ :CaseAutomation: Automated """ + +from box import Box from fauxfactory import gen_integer, gen_string, gen_url import pytest from robottelo.config import settings -from robottelo.constants import DOCKER_REPO_UPSTREAM_NAME, REPO_TYPE +from robottelo.constants import DOCKER_REPO_UPSTREAM_NAME, REPO_TYPE, REPOS +from robottelo.hosts import ProxyHostError + + +@pytest.fixture +def function_spec_char_user(target_sat, session_auth_proxy): + """Creates a user with special character password on the auth HTTP proxy""" + name = gen_string('alpha').lower() # lower! 
+ passwd = gen_string('punctuation').replace("'", '') + session_auth_proxy.add_user(name, passwd) + yield Box(name=name, passwd=passwd) + session_auth_proxy.remove_user(name) @pytest.mark.tier2 @@ -26,8 +39,6 @@ def test_positive_create_update_delete(module_org, module_location, target_sat): :id: 0c7cdf3d-778f-427a-9a2f-42ad7c23aa15 :expectedresults: All expected CRUD actions finished successfully - - :CaseImportance: High """ http_proxy_name = gen_string('alpha', 15) updated_proxy_name = gen_string('alpha', 15) @@ -53,8 +64,8 @@ def test_positive_create_update_delete(module_org, module_location, target_sat): assert http_proxy_values['http_proxy']['name'] == http_proxy_name assert http_proxy_values['http_proxy']['url'] == http_proxy_url assert http_proxy_values['http_proxy']['username'] == username - assert http_proxy_values['locations']['resources']['assigned'][0] == module_location.name - assert http_proxy_values['organizations']['resources']['assigned'][0] == module_org.name + assert module_location.name in http_proxy_values['locations']['resources']['assigned'] + assert module_org.name in http_proxy_values['organizations']['resources']['assigned'] # Update http_proxy with new name session.http_proxy.update(http_proxy_name, {'http_proxy.name': updated_proxy_name}) assert session.http_proxy.search(updated_proxy_name)[0]['Name'] == updated_proxy_name @@ -198,7 +209,7 @@ def test_set_default_http_proxy(module_org, module_location, setting_update, tar :steps: 1. Navigate to Infrastructure > Http Proxies 2. Create a Http Proxy - 3. GoTo to Administer > Settings > content tab + 3. Go to Administer > Settings > Content tab 4. Update the "Default HTTP Proxy" with created above. 5. Update "Default HTTP Proxy" to "no global default". @@ -239,29 +250,30 @@ def test_set_default_http_proxy(module_org, module_location, setting_update, tar def test_check_http_proxy_value_repository_details( function_org, function_location, function_product, setting_update, target_sat ): - """Deleted Global Http Proxy is reflected in repository details page". + """Global Http Proxy is reflected in repository details page". :id: 3f64255a-ef6c-4acb-b99b-e5579133b564 :steps: 1. Create Http Proxy (Go to Infrastructure > Http Proxies > New Http Proxy) - 2. GoTo to Administer > Settings > content tab + 2. Go to Administer > Settings > Content tab 3. Update the "Default HTTP Proxy" with created above. - 4. Create repository with Global Default Http Proxy. - 5. Delete the Http Proxy + 4. Create repository, check the Global Default Http Proxy is used. + 5. Delete the Http Proxy. + 6. Check it no longer appears on the Settings and repository page. :BZ: 1820193 :parametrized: yes :expectedresults: - 1. After deletion of "Default Http Proxy" its field on settings page should be - set to no global defult - 2. "HTTP Proxy" field in repository details page should be set to Global Default (None). + 1. Repository is automatically created with relevant Global Default Http Proxy. + 2. After Http Proxy deletion + - its field on Settings page should be set to Empty. + - "HTTP Proxy" field in repository details page should be set to Global Default (None). 
     :CaseImportance: Medium
     """
-
     property_name = setting_update.name
     repo_name = gen_string('alpha')
     http_proxy_a = target_sat.api.HTTPProxy(
@@ -285,45 +297,102 @@ def test_check_http_proxy_value_repository_details(
                 'repo_content.upstream_url': settings.repos.yum_0.url,
             },
         )
+        repo_values = session.repository.read(function_product.name, repo_name)
+        assert (
+            repo_values['repo_content']['http_proxy_policy']
+            == f'Global Default ({http_proxy_a.name})'
+        )
+
+        session.http_proxy.delete(http_proxy_a.name)
         result = session.settings.read(f'name = {property_name}')
         assert result['table'][0]['Value'] == "Empty"
-        session.repository.search(function_product.name, repo_name)[0]['Name']
         repo_values = session.repository.read(function_product.name, repo_name)
         assert repo_values['repo_content']['http_proxy_policy'] == 'Global Default (None)'


 @pytest.mark.tier3
 @pytest.mark.run_in_one_thread
-@pytest.mark.stubbed
-def test_http_proxy_containing_special_characters():
+def test_http_proxy_containing_special_characters(
+    request,
+    target_sat,
+    session_auth_proxy,
+    function_spec_char_user,
+    module_sca_manifest_org,
+    default_location,
+):
     """Test Manifest refresh and redhat repository sync with http proxy special
     characters in password.

     :id: 16082c6a-9320-4a9a-bd6c-5687b099c940

-    :customerscenario: true
+    :setup:
+        1. Have an authenticated HTTP proxy.
+        2. At the Proxy side create a user with special characters in password
+           (via the function_spec_char_user fixture), let's call him the spec-char user.

     :steps:
-        1. Navigate to Infrastructure > Http Proxies
-        2. Create HTTP Proxy with special characters in password.
-        3. Go To to Administer > Settings > content tab
-        4. Fill the details related to HTTP Proxy and click on "Test connection" button.
-        5. Update the "Default HTTP Proxy" with created above.
-        6. Refresh manifest.
-        7. Enable and sync any redhat repositories.
-
-    :BZ: 1844840
+        1. Check that no logs exist for the spec-char user at the proxy side yet.
+        2. Create a proxy via UI using the spec-char user.
+        3. Update settings to use the proxy for the content ops.
+        4. Refresh the manifest, check it went through the proxy.
+        5. Enable and sync some RH repository, check it went through the proxy.

     :expectedresults:
-        1. "Test connection" button workes as expected.
-        2. Manifest refresh, repository enable/disable and repository sync operation
-        finished successfully.
+        1. HTTP proxy can be created via UI using the spec-char user.
+        2. Manifest refresh, repository enable and sync succeed and are performed
+           through the HTTP proxy.

-    :CaseAutomation: NotAutomated
+    :BZ: 1844840

-    :CaseImportance: High
+    :customerscenario: true
     """
+    # Check that no logs exist for the spec-char user at the proxy side yet.
+    with pytest.raises(ProxyHostError):
+        session_auth_proxy.get_log(tail=100, grep=function_spec_char_user.name)
+
+    # Create a proxy via UI using the spec-char user.
+ proxy_name = gen_string('alpha') + with target_sat.ui_session() as session: + session.organization.select(org_name=module_sca_manifest_org.name) + session.http_proxy.create( + { + 'http_proxy.name': proxy_name, + 'http_proxy.url': settings.http_proxy.auth_proxy_url, + 'http_proxy.username': function_spec_char_user.name, + 'http_proxy.password': function_spec_char_user.passwd, + 'locations.resources.assigned': [default_location.name], + 'organizations.resources.assigned': [module_sca_manifest_org.name], + } + ) + request.addfinalizer( + lambda: target_sat.api.HTTPProxy() + .search(query={'search': f'name={proxy_name}'})[0] + .delete() + ) + + # Update settings to use the proxy for the content ops. + session.settings.update( + 'name = content_default_http_proxy', + f'{proxy_name} ({settings.http_proxy.auth_proxy_url})', + ) + + # Refresh the manifest, check it went through the proxy. + target_sat.cli.Subscription.refresh_manifest( + {'organization-id': module_sca_manifest_org.id} + ) + assert session_auth_proxy.get_log( + tail=100, grep=f'CONNECT subscription.rhsm.redhat.com.*{function_spec_char_user.name}' + ), 'RHSM connection not found in proxy log' + + # Enable and sync some RH repository, check it went through the proxy. + repo_id = target_sat.api_factory.enable_sync_redhat_repo( + REPOS['rhae2'], module_sca_manifest_org.id + ) + repo = target_sat.api.Repository(id=repo_id).read() + assert session_auth_proxy.get_log( + tail=100, grep=f'CONNECT cdn.redhat.com.*{function_spec_char_user.name}' + ), 'CDN connection not found in proxy log' + assert repo.content_counts['rpm'] > 0, 'Where is my content?!' @pytest.mark.tier2 diff --git a/tests/foreman/ui/test_jobinvocation.py b/tests/foreman/ui/test_jobinvocation.py deleted file mode 100644 index d79d7ee3355..00000000000 --- a/tests/foreman/ui/test_jobinvocation.py +++ /dev/null @@ -1,171 +0,0 @@ -"""Test class for Job Invocation procedure - -:Requirement: JobInvocation - -:CaseAutomation: Automated - -:CaseComponent: RemoteExecution - -:Team: Endeavour - -:CaseImportance: High - -""" -from inflection import camelize -import pytest - -from robottelo.utils.datafactory import gen_string - - -@pytest.fixture -def module_rhel_client_by_ip(module_org, smart_proxy_location, rhel7_contenthost, target_sat): - """Setup a broker rhel client to be used in remote execution by ip""" - rhel7_contenthost.configure_rex(satellite=target_sat, org=module_org) - target_sat.api_factory.update_vm_host_location( - rhel7_contenthost, location_id=smart_proxy_location.id - ) - return rhel7_contenthost - - -@pytest.mark.tier4 -def test_positive_run_default_job_template_by_ip( - session, module_org, smart_proxy_location, module_rhel_client_by_ip -): - """Run a job template on a host connected by ip - - :id: 9a90aa9a-00b4-460e-b7e6-250360ee8e4d - - :Setup: Use pre-defined job template. - - :steps: - - 1. Set remote_execution_connect_by_ip on host to true - 2. Navigate to an individual host and click Run Job - 3. Select the job and appropriate template - 4. 
Run the job - - :expectedresults: Verify the job was successfully ran against the host - - :parametrized: yes - """ - hostname = module_rhel_client_by_ip.hostname - with session: - session.organization.select(module_org.name) - session.location.select(smart_proxy_location.name) - assert session.host.search(hostname)[0]['Name'] == hostname - session.jobinvocation.run( - { - 'job_category': 'Commands', - 'job_template': 'Run Command - Script Default', - 'search_query': f'name ^ {hostname}', - 'template_content.command': 'ls', - } - ) - session.jobinvocation.wait_job_invocation_state(entity_name='Run ls', host_name=hostname) - status = session.jobinvocation.read(entity_name='Run ls', host_name=hostname) - assert status['overview']['hosts_table'][0]['Status'] == 'success' - - -@pytest.mark.tier4 -def test_positive_run_custom_job_template_by_ip( - session, module_org, smart_proxy_location, module_rhel_client_by_ip -): - """Run a job template on a host connected by ip - - :id: e283ae09-8b14-4ce1-9a76-c1bbd511d58c - - :Setup: Create a working job template. - - :steps: - - 1. Set remote_execution_connect_by_ip on host to true - 2. Navigate to an individual host and click Run Job - 3. Select the job and appropriate template - 4. Run the job - - :expectedresults: Verify the job was successfully ran against the host - - :parametrized: yes - """ - hostname = module_rhel_client_by_ip.hostname - job_template_name = gen_string('alpha') - with session: - session.organization.select(module_org.name) - session.location.select(smart_proxy_location.name) - assert session.host.search(hostname)[0]['Name'] == hostname - session.jobtemplate.create( - { - 'template.name': job_template_name, - 'template.template_editor.rendering_options': 'Editor', - 'template.template_editor.editor': '<%= input("command") %>', - 'job.provider_type': 'Script', - 'inputs': [{'name': 'command', 'required': True, 'input_type': 'User input'}], - } - ) - assert session.jobtemplate.search(job_template_name)[0]['Name'] == job_template_name - session.jobinvocation.run( - { - 'job_category': 'Miscellaneous', - 'job_template': job_template_name, - 'search_query': f'name ^ {hostname}', - 'template_content.command': 'ls', - } - ) - job_description = f'{camelize(job_template_name.lower())} with inputs command="ls"' - session.jobinvocation.wait_job_invocation_state( - entity_name=job_description, host_name=hostname - ) - status = session.jobinvocation.read(entity_name=job_description, host_name=hostname) - assert status['overview']['hosts_table'][0]['Status'] == 'success' - - -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_positive_schedule_recurring_host_job(self): - """Using the new Host UI, schedule a recurring job on a Host - - :id: 5052be04-28ab-4349-8bee-851ef76e4ffa - - :caseComponent: Ansible-RemoteExecution - - :Team: Rocket - - :steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Assign a role to host. - 4. Navigate to the new UI for the given Host. - 5. Select the Jobs subtab. - 6. Click the Schedule Recurring Job button, and using the popup, schedule a - recurring Job. - 7. Navigate to Job Invocations. 
- - :expectedresults: The scheduled Job appears in the Job Invocation list at the appointed - time - """ - - -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_positive_schedule_recurring_hostgroup_job(self): - """Using the new recurring job scheduler, schedule a recurring job on a Hostgroup - - :id: c65db99b-11fe-4a32-89d0-0a4692b07efe - - :caseComponent: Ansible-RemoteExecution - - :Team: Rocket - - :steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Assign a role to host. - 4. Navigate to the Host Group page. - 5. Select the "Configure Ansible Job" action. - 6. Click the Schedule Recurring Job button, and using the popup, schedule a - recurring Job. - 7. Navigate to Job Invocations. - - :expectedresults: The scheduled Job appears in the Job Invocation list at the appointed - time - """ diff --git a/tests/foreman/ui/test_jobtemplate.py b/tests/foreman/ui/test_jobtemplate.py index e2be74da640..c4ad3fa6ccb 100644 --- a/tests/foreman/ui/test_jobtemplate.py +++ b/tests/foreman/ui/test_jobtemplate.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/ui/test_ldap_authentication.py b/tests/foreman/ui/test_ldap_authentication.py index a6e3af6d612..15f5d0d218a 100644 --- a/tests/foreman/ui/test_ldap_authentication.py +++ b/tests/foreman/ui/test_ldap_authentication.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import os from fauxfactory import gen_url @@ -404,9 +405,12 @@ def test_positive_delete_external_roles( session.usergroup.update( ldap_usergroup_name, {'roles.resources.unassigned': [foreman_role.name]} ) - with target_sat.ui_session( - test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] - ) as ldapsession, pytest.raises(NavigationTriesExceeded): + with ( + target_sat.ui_session( + test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] + ) as ldapsession, + pytest.raises(NavigationTriesExceeded), + ): ldapsession.location.create({'name': gen_string('alpha')}) @@ -758,9 +762,12 @@ def test_positive_login_user_basic_roles( role = target_sat.api.Role().create() permissions = {'Architecture': PERMISSIONS['Architecture']} target_sat.api_factory.create_role_permissions(role, permissions) - with target_sat.ui_session( - test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] - ) as ldapsession, pytest.raises(NavigationTriesExceeded): + with ( + target_sat.ui_session( + test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] + ) as ldapsession, + pytest.raises(NavigationTriesExceeded), + ): ldapsession.usergroup.search('') with session: session.user.update(ldap_data['ldap_user_name'], {'roles.resources.assigned': [role.name]}) @@ -792,9 +799,12 @@ def test_positive_login_user_password_otp( otp_pass = ( f"{default_ipa_host.ldap_user_passwd}{generate_otp(default_ipa_host.time_based_secret)}" ) - with target_sat.ui_session( - test_name, default_ipa_host.ipa_otp_username, otp_pass - ) as ldapsession, pytest.raises(NavigationTriesExceeded): + with ( + target_sat.ui_session( + test_name, default_ipa_host.ipa_otp_username, otp_pass + ) as ldapsession, + pytest.raises(NavigationTriesExceeded), + ): ldapsession.user.search('') users = target_sat.api.User().search( query={'search': f'login="{default_ipa_host.ipa_otp_username}"'} @@ -1210,11 +1220,14 @@ def test_userlist_with_external_admin( assert idm_user in ldapsession.task.read_all()['current_user'] # verify the users count with local admin and remote/external admin - with 
target_sat.ui_session( - user=idm_admin, password=settings.server.ssh_password - ) as remote_admin_session, target_sat.ui_session( - user=settings.server.admin_username, password=settings.server.admin_password - ) as local_admin_session: + with ( + target_sat.ui_session( + user=idm_admin, password=settings.server.ssh_password + ) as remote_admin_session, + target_sat.ui_session( + user=settings.server.admin_username, password=settings.server.admin_password + ) as local_admin_session, + ): assert local_admin_session.user.search(idm_user)[0]['Username'] == idm_user assert remote_admin_session.user.search(idm_user)[0]['Username'] == idm_user diff --git a/tests/foreman/ui/test_lifecycleenvironment.py b/tests/foreman/ui/test_lifecycleenvironment.py index 9c389fdd19d..f3c966bd10d 100644 --- a/tests/foreman/ui/test_lifecycleenvironment.py +++ b/tests/foreman/ui/test_lifecycleenvironment.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from navmazing import NavigationTriesExceeded import pytest diff --git a/tests/foreman/ui/test_location.py b/tests/foreman/ui/test_location.py index 98d2d2a8749..dc0680a8401 100644 --- a/tests/foreman/ui/test_location.py +++ b/tests/foreman/ui/test_location.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_ipaddr, gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_media.py b/tests/foreman/ui/test_media.py index 75a39f80edb..5c41ab40676 100644 --- a/tests/foreman/ui/test_media.py +++ b/tests/foreman/ui/test_media.py @@ -11,6 +11,7 @@ :CaseImportance: Low """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/ui/test_modulestreams.py b/tests/foreman/ui/test_modulestreams.py index f8a67ed15c6..126b0c3f442 100644 --- a/tests/foreman/ui/test_modulestreams.py +++ b/tests/foreman/ui/test_modulestreams.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_operatingsystem.py b/tests/foreman/ui/test_operatingsystem.py index 1324b556230..71a030772c6 100644 --- a/tests/foreman/ui/test_operatingsystem.py +++ b/tests/foreman/ui/test_operatingsystem.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.constants import HASH_TYPE diff --git a/tests/foreman/ui/test_organization.py b/tests/foreman/ui/test_organization.py index 3aa913484fb..c128b533b7b 100644 --- a/tests/foreman/ui/test_organization.py +++ b/tests/foreman/ui/test_organization.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_oscapcontent.py b/tests/foreman/ui/test_oscapcontent.py index 5669fb9e854..cb580c4c857 100644 --- a/tests/foreman/ui/test_oscapcontent.py +++ b/tests/foreman/ui/test_oscapcontent.py @@ -11,24 +11,13 @@ :CaseImportance: High """ -import os import pytest -from robottelo.config import robottelo_tmp_dir, settings from robottelo.constants import DataFile from robottelo.utils.datafactory import gen_string -@pytest.fixture(scope='module') -def oscap_content_path(module_target_sat): - _, file_name = os.path.split(settings.oscap.content_path) - - local_file = robottelo_tmp_dir.joinpath(file_name) - module_target_sat.get(remote_path=settings.oscap.content_path, local_path=str(local_file)) - return local_file - - @pytest.mark.skip_if_open("BZ:2167937") @pytest.mark.skip_if_open("BZ:2133151") @pytest.mark.tier1 diff --git a/tests/foreman/ui/test_oscappolicy.py 
b/tests/foreman/ui/test_oscappolicy.py index 712e6f37481..83c7e8be058 100644 --- a/tests/foreman/ui/test_oscappolicy.py +++ b/tests/foreman/ui/test_oscappolicy.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_oscaptailoringfile.py b/tests/foreman/ui/test_oscaptailoringfile.py index 0f6bdcdfc63..5b1dd6c8606 100644 --- a/tests/foreman/ui/test_oscaptailoringfile.py +++ b/tests/foreman/ui/test_oscaptailoringfile.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_package.py b/tests/foreman/ui/test_package.py index 643608811d1..a66ff706dbb 100644 --- a/tests/foreman/ui/test_package.py +++ b/tests/foreman/ui/test_package.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_partitiontable.py b/tests/foreman/ui/test_partitiontable.py index 127dc1ec67f..b8f0416331f 100644 --- a/tests/foreman/ui/test_partitiontable.py +++ b/tests/foreman/ui/test_partitiontable.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/ui/test_product.py b/tests/foreman/ui/test_product.py index 71a468ccb8e..1c6e986a527 100644 --- a/tests/foreman/ui/test_product.py +++ b/tests/foreman/ui/test_product.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import timedelta from fauxfactory import gen_choice diff --git a/tests/foreman/ui/test_provisioningtemplate.py b/tests/foreman/ui/test_provisioningtemplate.py index da2b8446dce..7ad306e2465 100644 --- a/tests/foreman/ui/test_provisioningtemplate.py +++ b/tests/foreman/ui/test_provisioningtemplate.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.constants import DataFile diff --git a/tests/foreman/ui/test_puppetclass.py b/tests/foreman/ui/test_puppetclass.py index b44ad73e7a7..42ae2b359ab 100644 --- a/tests/foreman/ui/test_puppetclass.py +++ b/tests/foreman/ui/test_puppetclass.py @@ -11,6 +11,7 @@ :CaseImportance: Low """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/ui/test_puppetenvironment.py b/tests/foreman/ui/test_puppetenvironment.py index 57502a51191..6add5508549 100644 --- a/tests/foreman/ui/test_puppetenvironment.py +++ b/tests/foreman/ui/test_puppetenvironment.py @@ -11,6 +11,7 @@ :CaseImportance: Low """ + import pytest from robottelo.constants import DEFAULT_CV, ENVIRONMENT diff --git a/tests/foreman/ui/test_registration.py b/tests/foreman/ui/test_registration.py index d26c974c1ad..5a8ec85e822 100644 --- a/tests/foreman/ui/test_registration.py +++ b/tests/foreman/ui/test_registration.py @@ -10,6 +10,7 @@ :Team: Rocket """ + from datetime import datetime import re @@ -528,7 +529,6 @@ def test_positive_host_registration_with_non_admin_user( target_sat.cli.User.add_role({'id': user.id, 'role-id': role['id']}) with Session(test_name, user=user.login, password=user_password) as session: - cmd = session.host_new.get_register_command( { 'general.insecure': True, diff --git a/tests/foreman/ui/test_remoteexecution.py b/tests/foreman/ui/test_remoteexecution.py index cdfe9c7ec4e..396a707c771 100644 --- a/tests/foreman/ui/test_remoteexecution.py +++ b/tests/foreman/ui/test_remoteexecution.py @@ -1,4 +1,4 @@ -"""Test class for Remote Execution Management UI +"""Test class for Job Invocation procedure :Requirement: Remoteexecution @@ -11,20 +11,27 @@ :CaseImportance: High """ + 
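# Illustration (a minimal sketch, not a hunk of this patch): the test_remoteexecution.py changes
# in this file derive the expected job-invocation name with inflection.camelize, as in
# f'{camelize(job_template_name.lower())} with inputs command="ls"'. Assuming only the
# third-party `inflection` package, camelize upper-cases the first character and each character
# that follows an underscore; the variable values below are hypothetical:
from inflection import camelize

job_template_name = 'MyTemplate_name'  # hypothetical template name; the tests use gen_string('alpha')
expected = f'{camelize(job_template_name.lower())} with inputs command="ls"'
assert camelize('mytemplate_name') == 'MytemplateName'
assert expected == 'MytemplateName with inputs command="ls"'
# for a plain alpha string with no underscores, camelize simply capitalizes the first letter,
# which is the form the job-invocation page displays and the tests wait on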
import datetime import time +from inflection import camelize import pytest from wait_for import wait_for -from robottelo.utils.datafactory import gen_string +from robottelo.utils.datafactory import ( + gen_string, +) -@pytest.mark.skip_if_open('BZ:2182353') @pytest.mark.rhel_ver_match('8') -@pytest.mark.tier3 -def test_positive_run_default_job_template_by_ip(session, rex_contenthost, module_org): - """Run a job template against a single host by ip +def test_positive_run_default_job_template( + session, + target_sat, + rex_contenthost, + module_org, +): + """Run a job template on a host :id: a21eac46-1a22-472d-b4ce-66097159a868 @@ -32,38 +39,38 @@ def test_positive_run_default_job_template_by_ip(session, rex_contenthost, modul :steps: - 1. Navigate to an individual host and click Run Job - 2. Select the job and appropriate template - 3. Run the job + 1. Get contenthost with rex enabled + 2. Navigate to an individual host and click Run Job + 3. Select the job and appropriate template + 4. Run the job - :expectedresults: Verify the job was successfully ran against the host + :expectedresults: Verify the job was successfully ran against the host, check also using the job widget on the main dashboard :parametrized: yes - :bz: 1898656 + :bz: 1898656, 2182353 :customerscenario: true """ + hostname = rex_contenthost.hostname - with session: + + with target_sat.ui_session() as session: session.organization.select(module_org.name) - session.location.select('Default Location') assert session.host.search(hostname)[0]['Name'] == hostname command = 'ls' - job_status = session.host.schedule_remote_job( - [hostname], + session.jobinvocation.run( { 'category_and_template.job_category': 'Commands', 'category_and_template.job_template': 'Run Command - Script Default', + 'target_hosts_and_inputs.targetting_type': 'Hosts', + 'target_hosts_and_inputs.targets': hostname, 'target_hosts_and_inputs.command': command, - 'advanced_fields.execution_order_randomized': True, - 'schedule.immediate': True, - }, + } ) - assert job_status['overview']['job_status'] == 'Success' - assert job_status['overview']['execution_order'] == 'Execution order: randomized' - assert job_status['overview']['hosts_table'][0]['Host'] == hostname - assert job_status['overview']['hosts_table'][0]['Status'] == 'success' + session.jobinvocation.wait_job_invocation_state(entity_name='Run ls', host_name=hostname) + status = session.jobinvocation.read(entity_name='Run ls', host_name=hostname) + assert status['overview']['hosts_table'][0]['Status'] == 'success' # check status also on the job dashboard job_name = f'Run {command}' @@ -73,16 +80,54 @@ def test_positive_run_default_job_template_by_ip(session, rex_contenthost, modul assert job_name in [job['Name'] for job in success_jobs] -@pytest.mark.skip_if_open('BZ:2182353') +@pytest.mark.tier4 +@pytest.mark.rhel_ver_match('8') +def test_rex_through_host_details(session, target_sat, rex_contenthost, module_org): + """Run remote execution using the new host details page + + :id: ee625595-4995-43b2-9e6d-633c9b33ff93 + + :steps: + 1. Navigate to Overview tab + 2. Schedule a job + 3. Wait for the job to finish + 4. 
Job is visible in Recent jobs card + + :expectedresults: Remote execution succeeded and the job is visible on Recent jobs card on + Overview tab + """ + + hostname = rex_contenthost.hostname + + job_args = { + 'category_and_template.job_category': 'Commands', + 'category_and_template.job_template': 'Run Command - Script Default', + 'target_hosts_and_inputs.command': 'ls', + } + with target_sat.ui_session() as session: + session.organization.select(module_org.name) + session.host_new.schedule_job(hostname, job_args) + task_result = target_sat.wait_for_tasks( + search_query=(f'Remote action: Run ls on {hostname}'), + search_rate=2, + max_tries=30, + ) + task_status = target_sat.api.ForemanTask(id=task_result[0].id).poll() + assert task_status['result'] == 'success' + recent_jobs = session.host_new.get_details(hostname, "overview.recent_jobs")['overview'] + assert recent_jobs['recent_jobs']['finished']['table'][0]['column0'] == "Run ls" + assert recent_jobs['recent_jobs']['finished']['table'][0]['column2'] == "succeeded" + + +@pytest.mark.tier4 @pytest.mark.rhel_ver_match('8') -@pytest.mark.tier3 @pytest.mark.parametrize( 'ui_user', [{'admin': True}, {'admin': False}], indirect=True, ids=['adminuser', 'nonadminuser'] ) -def test_positive_run_custom_job_template_by_ip( - session, module_org, target_sat, default_location, ui_user, rex_contenthost +def test_positive_run_custom_job_template( + session, module_org, default_location, target_sat, ui_user, rex_contenthost ): - """Run a job template on a host connected by ip + """Run a job template on a host :id: 3a59eb15-67c4-46e1-ba5f-203496ec0b0c @@ -103,13 +148,14 @@ def test_positive_run_custom_job_template_by_ip( :customerscenario: true """ + + hostname = rex_contenthost.hostname ui_user.location.append(target_sat.api.Location(id=default_location.id)) ui_user.update(['location']) - hostname = rex_contenthost.hostname job_template_name = gen_string('alpha') - with session: + with target_sat.ui_session() as session: session.organization.select(module_org.name) - session.location.select('Default Location') + assert session.host.search(hostname)[0]['Name'] == hostname session.jobtemplate.create( { 'template.name': job_template_name, @@ -120,29 +166,29 @@ def test_positive_run_custom_job_template_by_ip( } ) assert session.jobtemplate.search(job_template_name)[0]['Name'] == job_template_name - assert session.host.search(hostname)[0]['Name'] == hostname - job_status = session.host.schedule_remote_job( - [hostname], + session.jobinvocation.run( { 'category_and_template.job_category': 'Miscellaneous', 'category_and_template.job_template': job_template_name, + 'target_hosts_and_inputs.targets': hostname, 'target_hosts_and_inputs.command': 'ls', - 'schedule.immediate': True, - }, + } ) - assert job_status['overview']['job_status'] == 'Success' - assert job_status['overview']['hosts_table'][0]['Host'] == hostname - assert job_status['overview']['hosts_table'][0]['Status'] == 'success' + job_description = f'{camelize(job_template_name.lower())} with inputs command="ls"' + session.jobinvocation.wait_job_invocation_state( + entity_name=job_description, host_name=hostname + ) + status = session.jobinvocation.read(entity_name=job_description, host_name=hostname) + assert status['overview']['hosts_table'][0]['Status'] == 'success' -@pytest.mark.skip_if_open('BZ:2182353') @pytest.mark.upgrade @pytest.mark.tier3 @pytest.mark.rhel_ver_list([8]) -def test_positive_run_job_template_multiple_hosts_by_ip( - session, module_org, target_sat, registered_hosts +def 
test_positive_run_job_template_multiple_hosts( + session, module_org, target_sat, rex_contenthosts ): - """Run a job template against multiple hosts by ip + """Run a job template against multiple hosts :id: c4439ec0-bb80-47f6-bc31-fa7193bfbeeb @@ -158,22 +204,24 @@ def test_positive_run_job_template_multiple_hosts_by_ip( :expectedresults: Verify the job was successfully ran against the hosts """ + host_names = [] - for vm in registered_hosts: + for vm in rex_contenthosts: + # for vm in rex_contenthost: host_names.append(vm.hostname) vm.configure_rex(satellite=target_sat, org=module_org) - with session: + with target_sat.ui_session() as session: session.organization.select(module_org.name) - session.location.select('Default Location') - hosts = session.host.search(' or '.join([f'name="{hostname}"' for hostname in host_names])) - assert {host['Name'] for host in hosts} == set(host_names) + # session.location.select('Default Location') + for host in host_names: + assert session.host.search(host)[0]['Name'] == host + session.host.reset_search() job_status = session.host.schedule_remote_job( host_names, { 'category_and_template.job_category': 'Commands', 'category_and_template.job_template': 'Run Command - Script Default', - 'target_hosts_and_inputs.command': 'ls', - 'schedule.immediate': True, + 'target_hosts_and_inputs.command': 'sleep 5', }, ) assert job_status['overview']['job_status'] == 'Success' @@ -185,7 +233,6 @@ def test_positive_run_job_template_multiple_hosts_by_ip( ) -@pytest.mark.skip_if_open('BZ:2182353') @pytest.mark.rhel_ver_match('8') @pytest.mark.tier3 def test_positive_run_scheduled_job_template_by_ip(session, module_org, rex_contenthost): @@ -211,19 +258,20 @@ def test_positive_run_scheduled_job_template_by_ip(session, module_org, rex_cont :parametrized: yes """ - job_time = 10 * 60 + job_time = 6 * 60 hostname = rex_contenthost.hostname with session: session.organization.select(module_org.name) session.location.select('Default Location') assert session.host.search(hostname)[0]['Name'] == hostname plan_time = session.browser.get_client_datetime() + datetime.timedelta(seconds=job_time) + command_to_run = 'sleep 10' job_status = session.host.schedule_remote_job( [hostname], { 'category_and_template.job_category': 'Commands', 'category_and_template.job_template': 'Run Command - Script Default', - 'target_hosts_and_inputs.command': 'ls', + 'target_hosts_and_inputs.command': command_to_run, 'schedule.future': True, 'schedule_future_execution.start_at_date': plan_time.strftime("%Y/%m/%d"), 'schedule_future_execution.start_at_time': plan_time.strftime("%H:%M"), @@ -237,297 +285,36 @@ def test_positive_run_scheduled_job_template_by_ip(session, module_org, rex_cont # the job_time must be significantly greater than job creation time. 
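# Illustration (a minimal sketch, not a hunk of this patch) of the scheduling arithmetic the
# assertions below rely on: the job is planned `job_time` seconds ahead of the browser clock,
# so the remaining time must stay positive while the test sleeps away 3/4 of the window before
# re-reading the invocation status. datetime.datetime.now() stands in here for
# session.browser.get_client_datetime(), which is an assumption made only for this sketch.
import datetime
import time

job_time = 6 * 60  # seconds until the planned start, matching the updated test value
plan_time = datetime.datetime.now() + datetime.timedelta(seconds=job_time)

job_left_time = (plan_time - datetime.datetime.now()).total_seconds()
assert job_left_time > 0                 # the job must still be scheduled in the future
time.sleep(job_left_time * 3 / 4)        # burn roughly 3/4 of the remaining window

# after the sleep the job should still be pending, with about 1/4 of the window left,
# which is why the test recalculates job_left_time before asserting it is still positive
job_left_time = (plan_time - datetime.datetime.now()).total_seconds()
assert job_left_time > 0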
assert job_left_time > 0 assert job_status['overview']['hosts_table'][0]['Host'] == hostname - assert job_status['overview']['hosts_table'][0]['Status'] == 'N/A' + assert job_status['overview']['hosts_table'][0]['Status'] in ('Awaiting start', 'N/A') # sleep 3/4 of the left time time.sleep(job_left_time * 3 / 4) - job_status = session.jobinvocation.read('Run ls', hostname, 'overview.hosts_table') + job_status = session.jobinvocation.read( + f'Run {command_to_run}', hostname, 'overview.hosts_table' + ) assert job_status['overview']['hosts_table'][0]['Host'] == hostname - assert job_status['overview']['hosts_table'][0]['Status'] == 'N/A' + assert job_status['overview']['hosts_table'][0]['Status'] in ('Awaiting start', 'N/A') # recalculate the job left time to be more accurate job_left_time = (plan_time - session.browser.get_client_datetime()).total_seconds() # the last read time should not take more than 1/4 of the last left time assert job_left_time > 0 wait_for( - lambda: session.jobinvocation.read('Run ls', hostname, 'overview.hosts_table')[ - 'overview' - ]['hosts_table'][0]['Status'] + lambda: session.jobinvocation.read( + f'Run {command_to_run}', hostname, 'overview.hosts_table' + )['overview']['hosts_table'][0]['Status'] == 'running', timeout=(job_left_time + 30), delay=1, ) # wait the job to change status to "success" wait_for( - lambda: session.jobinvocation.read('Run ls', hostname, 'overview.hosts_table')[ - 'overview' - ]['hosts_table'][0]['Status'] + lambda: session.jobinvocation.read( + f'Run {command_to_run}', hostname, 'overview.hosts_table' + )['overview']['hosts_table'][0]['Status'] == 'success', timeout=30, delay=1, ) - job_status = session.jobinvocation.read('Run ls', hostname, 'overview') + job_status = session.jobinvocation.read(f'Run {command_to_run}', hostname, 'overview') assert job_status['overview']['job_status'] == 'Success' assert job_status['overview']['hosts_table'][0]['Host'] == hostname assert job_status['overview']['hosts_table'][0]['Status'] == 'success' - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_ansible_job_check_mode(session): - """Run a job on a host with enable_roles_check_mode parameter enabled - - :id: 7aeb7253-e555-4e28-977f-71f16d3c32e2 - - :steps: - - 1. Set the value of the ansible_roles_check_mode parameter to true on a host - 2. Associate one or more Ansible roles with the host - 3. Run Ansible roles against the host - - :expectedresults: Verify that the roles were run in check mode - (i.e. no changes were made on the host) - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-RemoteExecution - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_ansible_config_report_failed_tasks_errors(session): - """Check that failed Ansible tasks show as errors in the config report - - :id: 1a91e534-143f-4f35-953a-7ad8b7d2ddf3 - - :steps: - - 1. Import Ansible roles - 2. Assign Ansible roles to a host - 3. Run Ansible roles on host - - :expectedresults: Verify that any task failures are listed as errors in the config report - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_ansible_config_report_changes_notice(session): - """Check that Ansible tasks that make changes on a host show as notice in the config report - - :id: 8c90f179-8b70-4932-a477-75dc3566c437 - - :steps: - - 1. Import Ansible Roles - 2. Assign Ansible roles to a host - 3. 
Run Ansible Roles on a host - - :expectedresults: Verify that any tasks that make changes on the host - are listed as notice in the config report - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_ansible_variables_imported_with_roles(session): - """Verify that, when Ansible roles are imported, their variables are imported simultaneously - - :id: 107c53e8-5a8a-4291-bbde-fbd66a0bb85e - - :steps: - - 1. Import Ansible roles - - :expectedresults: Verify that any variables in the role were also imported to Satellite - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_roles_import_in_background(session): - """Verify that importing roles does not create a popup that blocks the UI - - :id: 4f1c7b76-9c67-42b2-9a73-980ca1f05abc - - :steps: - - 1. Import Ansible roles - - :expectedresults: Verify that the UI is accessible while roles are importing - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_ansible_roles_ignore_list(session): - """Verify that the ignore list setting prevents selected roles from being available for import - - :id: 6fa1d8f0-b583-4a07-88eb-c9ae7fcd0219 - - :steps: - - 1. Add roles to the ignore list in Administer > Settings > Ansible - 2. Navigate to Configure > Roles - - :expectedresults: Verify that any roles on the ignore list are not available for import - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_ansible_variables_installed_with_collection(session): - """Verify that installing an Ansible collection also imports - any variables associated with the collection - - :id: 7ff88022-fe9b-482f-a6bb-3922036a1e1c - - :steps: - - 1. Install an Ansible collection - 2. Navigate to Configure > Variables - - :expectedresults: Verify that any variables associated with the collection - are present on Configure > Variables - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_install_ansible_collection_via_job_invocation(session): - """Verify that Ansible collections can be installed on hosts via job invocations - - :id: d4096aef-f6fc-41b6-ae56-d19b1f49cd42 - - :steps: - - 1. Enable a host for remote execution - 2. Navigate to Hosts > Schedule Remote Job - 3. Select "Ansible Galaxy" as the job category - 4. Select "Ansible Collection - Install from Galaxy" as the job template - 5. Enter a collection in the ansible_collections_list field - 6. Click "Submit" - - :expectedresults: The Ansible collection is successfully installed on the host - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-RemoteExecution - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_set_ansible_role_order_per_host(session): - """Verify that role run order can be set and that this order is respected when roles are run - - :id: 24fbcd60-7cd1-46ff-86ac-16d6b436202c - - :steps: - - 1. Enable a host for remote execution - 2. Navigate to Hosts > All Hosts > $hostname > Edit > Ansible Roles - 3. Assign more than one role to the host - 4. 
Use the drag-and-drop mechanism to change the order of the roles - 5. Run Ansible roles on the host - - :expectedresults: The roles are run in the specified order - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_set_ansible_role_order_per_hostgroup(session): - """Verify that role run order can be set and that this order is respected when roles are run - - :id: 9eb5bc8e-081a-45b9-8751-f4220c944da6 - - :steps: - - 1. Enable a host for remote execution - 2. Create a host group - 3. Navigate to Configure > Host Groups > $hostgroup > Ansible Roles - 4. Assign more than one role to the host group - 5. Use the drag-and-drop mechanism to change the order of the roles - 6. Add the host to the host group - 7. Run Ansible roles on the host group - - :expectedresults: The roles are run in the specified order - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_matcher_field_highlight(session): - """Verify that Ansible variable matcher fields change color when modified - - :id: 67b45cfe-31bb-41a8-b88e-27917c68f33e - - :steps: - - 1. Navigate to Configure > Variables > $variablename - 2. Select the "Override" checkbox in the "Default Behavior" section - 3. Click "+Add Matcher" in the "Specify Matcher" section - 4. Select an option from the "Attribute type" dropdown - 5. Add text to the attribute type input field - 6. Add text to the "Value" input field - - :expectedresults: The background of each field turns yellow when a change is made - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible-ConfigurationManagement - - :Team: Rocket - """ diff --git a/tests/foreman/ui/test_reporttemplates.py b/tests/foreman/ui/test_reporttemplates.py index f8fbfff52d9..262bddf9d45 100644 --- a/tests/foreman/ui/test_reporttemplates.py +++ b/tests/foreman/ui/test_reporttemplates.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import csv import json import os diff --git a/tests/foreman/ui/test_repositories.py b/tests/foreman/ui/test_repositories.py index 55f69a165d2..f9cb1e95632 100644 --- a/tests/foreman/ui/test_repositories.py +++ b/tests/foreman/ui/test_repositories.py @@ -11,6 +11,7 @@ :CaseImportance: Critical """ + import pytest diff --git a/tests/foreman/ui/test_repository.py b/tests/foreman/ui/test_repository.py index 1d9d40e10bb..554d61babd9 100644 --- a/tests/foreman/ui/test_repository.py +++ b/tests/foreman/ui/test_repository.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import datetime, timedelta from random import randint, shuffle @@ -583,8 +584,9 @@ def test_positive_upstream_with_credentials(session, module_prod): ) assert session.repository.search(module_prod.name, repo_name)[0]['Name'] == repo_name repo_values = session.repository.read(module_prod.name, repo_name) - assert repo_values['repo_content']['upstream_authorization'] == '{} / {}'.format( - upstream_username, hidden_password + assert ( + repo_values['repo_content']['upstream_authorization'] + == f'{upstream_username} / {hidden_password}' ) session.repository.update( module_prod.name, @@ -596,8 +598,9 @@ def test_positive_upstream_with_credentials(session, module_prod): }, ) repo_values = session.repository.read(module_prod.name, repo_name) - assert repo_values['repo_content']['upstream_authorization'] == '{} / {}'.format( - new_upstream_username, hidden_password + assert ( + 
repo_values['repo_content']['upstream_authorization'] + == f'{new_upstream_username} / {hidden_password}' ) session.repository.update( module_prod.name, repo_name, {'repo_content.upstream_authorization': {}} diff --git a/tests/foreman/ui/test_rhc.py b/tests/foreman/ui/test_rhc.py index f3ab5eae14f..b9f02dcee8f 100644 --- a/tests/foreman/ui/test_rhc.py +++ b/tests/foreman/ui/test_rhc.py @@ -6,11 +6,12 @@ :CaseComponent: RHCloud -:Team: Platform +:Team: Phoenix-subscriptions :CaseImportance: High """ + from datetime import datetime, timedelta from fauxfactory import gen_string diff --git a/tests/foreman/ui/test_rhcloud_insights.py b/tests/foreman/ui/test_rhcloud_insights.py index 2a3cabf789f..286727095d1 100644 --- a/tests/foreman/ui/test_rhcloud_insights.py +++ b/tests/foreman/ui/test_rhcloud_insights.py @@ -6,11 +6,12 @@ :CaseComponent: RHCloud -:Team: Platform +:Team: Phoenix-subscriptions :CaseImportance: High """ + from datetime import datetime import pytest diff --git a/tests/foreman/ui/test_rhcloud_inventory.py b/tests/foreman/ui/test_rhcloud_inventory.py index 642bef3b84b..f576f88b3e0 100644 --- a/tests/foreman/ui/test_rhcloud_inventory.py +++ b/tests/foreman/ui/test_rhcloud_inventory.py @@ -6,11 +6,12 @@ :CaseComponent: RHCloud -:Team: Platform +:Team: Phoenix-subscriptions :CaseImportance: High """ + from datetime import datetime, timedelta import pytest diff --git a/tests/foreman/ui/test_role.py b/tests/foreman/ui/test_role.py index 652bdcbd4f6..6a93789f6aa 100644 --- a/tests/foreman/ui/test_role.py +++ b/tests/foreman/ui/test_role.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from navmazing import NavigationTriesExceeded diff --git a/tests/foreman/ui/test_settings.py b/tests/foreman/ui/test_settings.py index 72bcf9223f0..e734fddd33c 100644 --- a/tests/foreman/ui/test_settings.py +++ b/tests/foreman/ui/test_settings.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import math from fauxfactory import gen_url diff --git a/tests/foreman/ui/test_smartclassparameter.py b/tests/foreman/ui/test_smartclassparameter.py index adc5169f2ae..7cfe14cd036 100644 --- a/tests/foreman/ui/test_smartclassparameter.py +++ b/tests/foreman/ui/test_smartclassparameter.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from random import choice, uniform import pytest diff --git a/tests/foreman/ui/test_subnet.py b/tests/foreman/ui/test_subnet.py index 45886c3ec86..c29bfcb850a 100644 --- a/tests/foreman/ui/test_subnet.py +++ b/tests/foreman/ui/test_subnet.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_ipaddr import pytest diff --git a/tests/foreman/ui/test_subscription.py b/tests/foreman/ui/test_subscription.py index 348398a1de2..5927bffebb7 100644 --- a/tests/foreman/ui/test_subscription.py +++ b/tests/foreman/ui/test_subscription.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from tempfile import mkstemp import time diff --git a/tests/foreman/ui/test_sync.py b/tests/foreman/ui/test_sync.py index 308ef7b485a..064b20ca4bb 100644 --- a/tests/foreman/ui/test_sync.py +++ b/tests/foreman/ui/test_sync.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_syncplan.py b/tests/foreman/ui/test_syncplan.py index ae4737fc9e1..de7f8ab3d7e 100644 --- a/tests/foreman/ui/test_syncplan.py +++ b/tests/foreman/ui/test_syncplan.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import datetime, timedelta import time diff --git 
a/tests/foreman/ui/test_templatesync.py b/tests/foreman/ui/test_templatesync.py index 7f0cc0e70ac..2c73fa59df4 100644 --- a/tests/foreman/ui/test_templatesync.py +++ b/tests/foreman/ui/test_templatesync.py @@ -9,6 +9,7 @@ :Team: Endeavour """ + from fauxfactory import gen_string from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_user.py b/tests/foreman/ui/test_user.py index a69b58250bf..be765f9d9b0 100644 --- a/tests/foreman/ui/test_user.py +++ b/tests/foreman/ui/test_user.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import random from fauxfactory import gen_email, gen_string diff --git a/tests/foreman/ui/test_usergroup.py b/tests/foreman/ui/test_usergroup.py index 092126ae868..4a275367e43 100644 --- a/tests/foreman/ui/test_usergroup.py +++ b/tests/foreman/ui/test_usergroup.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string, gen_utf8 from nailgun import entities import pytest diff --git a/tests/foreman/ui/test_webhook.py b/tests/foreman/ui/test_webhook.py index c0e3b470c20..90668a8f9d9 100644 --- a/tests/foreman/ui/test_webhook.py +++ b/tests/foreman/ui/test_webhook.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string, gen_url import pytest diff --git a/tests/foreman/virtwho/api/test_esx.py b/tests/foreman/virtwho/api/test_esx.py index bb6046188a0..4b208bae4c8 100644 --- a/tests/foreman/virtwho/api/test_esx.py +++ b/tests/foreman/virtwho/api/test_esx.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings @@ -359,9 +360,7 @@ def test_positive_remove_env_option( # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" config_file = get_configure_file(virtwho_config_api.id) - env_error = ( - f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" - ) + env_error = f"option {{'{option}'}} is not exist or not be enabled in {{'{config_file}'}}" with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) assert str(exc_info.value) == env_error diff --git a/tests/foreman/virtwho/api/test_esx_sca.py b/tests/foreman/virtwho/api/test_esx_sca.py index 2d43f59637a..10c9500eed9 100644 --- a/tests/foreman/virtwho/api/test_esx_sca.py +++ b/tests/foreman/virtwho/api/test_esx_sca.py @@ -9,6 +9,7 @@ :Team: Phoenix """ + import pytest from robottelo.config import settings @@ -407,9 +408,7 @@ def test_positive_remove_env_option( # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" config_file = get_configure_file(virtwho_config_api.id) - env_error = ( - f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" - ) + env_error = f"option {{'{option}'}} is not exist or not be enabled in {{'{config_file}'}}" with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) assert str(exc_info.value) == env_error diff --git a/tests/foreman/virtwho/api/test_hyperv.py b/tests/foreman/virtwho/api/test_hyperv.py index 6d93f1ddb9a..d66c13ed62f 100644 --- a/tests/foreman/virtwho/api/test_hyperv.py +++ b/tests/foreman/virtwho/api/test_hyperv.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/api/test_hyperv_sca.py b/tests/foreman/virtwho/api/test_hyperv_sca.py index 715ac92cd12..e74748cde66 100644 --- 
a/tests/foreman/virtwho/api/test_hyperv_sca.py +++ b/tests/foreman/virtwho/api/test_hyperv_sca.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/api/test_kubevirt.py b/tests/foreman/virtwho/api/test_kubevirt.py index ef1e7ac578d..ece48a67245 100644 --- a/tests/foreman/virtwho/api/test_kubevirt.py +++ b/tests/foreman/virtwho/api/test_kubevirt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/api/test_kubevirt_sca.py b/tests/foreman/virtwho/api/test_kubevirt_sca.py index 51628052cd9..56f34e17ecb 100644 --- a/tests/foreman/virtwho/api/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/api/test_kubevirt_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/api/test_libvirt.py b/tests/foreman/virtwho/api/test_libvirt.py index b551e0e81dc..677d5124a24 100644 --- a/tests/foreman/virtwho/api/test_libvirt.py +++ b/tests/foreman/virtwho/api/test_libvirt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/api/test_libvirt_sca.py b/tests/foreman/virtwho/api/test_libvirt_sca.py index f88edd8d478..34d6b78f84c 100644 --- a/tests/foreman/virtwho/api/test_libvirt_sca.py +++ b/tests/foreman/virtwho/api/test_libvirt_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/api/test_nutanix.py b/tests/foreman/virtwho/api/test_nutanix.py index e40433b1c57..6e29e638478 100644 --- a/tests/foreman/virtwho/api/test_nutanix.py +++ b/tests/foreman/virtwho/api/test_nutanix.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/api/test_nutanix_sca.py b/tests/foreman/virtwho/api/test_nutanix_sca.py index 14b6d2a04fd..b6f7bbc9356 100644 --- a/tests/foreman/virtwho/api/test_nutanix_sca.py +++ b/tests/foreman/virtwho/api/test_nutanix_sca.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/cli/test_esx.py b/tests/foreman/virtwho/cli/test_esx.py index c18ffb3ab30..0b148edbfc4 100644 --- a/tests/foreman/virtwho/cli/test_esx.py +++ b/tests/foreman/virtwho/cli/test_esx.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import re from fauxfactory import gen_string @@ -410,9 +411,7 @@ def test_positive_remove_env_option( # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" config_file = get_configure_file(virtwho_config_cli['id']) - env_error = ( - f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" - ) + env_error = f"option {{'{option}'}} is not exist or not be enabled in {{'{config_file}'}}" with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) assert str(exc_info.value) == env_error diff --git a/tests/foreman/virtwho/cli/test_esx_sca.py b/tests/foreman/virtwho/cli/test_esx_sca.py index 6b15b6803f9..a6c2e4843e2 100644 --- a/tests/foreman/virtwho/cli/test_esx_sca.py +++ b/tests/foreman/virtwho/cli/test_esx_sca.py @@ -9,6 +9,7 @@ :Team: Phoenix """ + import re from fauxfactory import gen_string @@ -492,9 +493,7 @@ def test_positive_remove_env_option( # Check the option "env=" should be 
removed from etc/virt-who.d/virt-who.conf option = "env" config_file = get_configure_file(virtwho_config_cli['id']) - env_error = ( - f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" - ) + env_error = f"option {{'{option}'}} is not exist or not be enabled in {{'{config_file}'}}" with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) assert str(exc_info.value) == env_error diff --git a/tests/foreman/virtwho/cli/test_hyperv.py b/tests/foreman/virtwho/cli/test_hyperv.py index 657ee3e04f6..98f84d277e1 100644 --- a/tests/foreman/virtwho/cli/test_hyperv.py +++ b/tests/foreman/virtwho/cli/test_hyperv.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/cli/test_hyperv_sca.py b/tests/foreman/virtwho/cli/test_hyperv_sca.py index 7c59213485d..e72da8f78e0 100644 --- a/tests/foreman/virtwho/cli/test_hyperv_sca.py +++ b/tests/foreman/virtwho/cli/test_hyperv_sca.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/cli/test_kubevirt.py b/tests/foreman/virtwho/cli/test_kubevirt.py index f29a22e4a16..88e9bcdcc2e 100644 --- a/tests/foreman/virtwho/cli/test_kubevirt.py +++ b/tests/foreman/virtwho/cli/test_kubevirt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/cli/test_kubevirt_sca.py b/tests/foreman/virtwho/cli/test_kubevirt_sca.py index d682b5f9159..1c96fbc88a6 100644 --- a/tests/foreman/virtwho/cli/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/cli/test_kubevirt_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/cli/test_libvirt.py b/tests/foreman/virtwho/cli/test_libvirt.py index 70ac4056fa3..ea8f09c9cd0 100644 --- a/tests/foreman/virtwho/cli/test_libvirt.py +++ b/tests/foreman/virtwho/cli/test_libvirt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/cli/test_libvirt_sca.py b/tests/foreman/virtwho/cli/test_libvirt_sca.py index 2bfeda30801..85ee3e8d7f0 100644 --- a/tests/foreman/virtwho/cli/test_libvirt_sca.py +++ b/tests/foreman/virtwho/cli/test_libvirt_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/cli/test_nutanix.py b/tests/foreman/virtwho/cli/test_nutanix.py index b5478912ab7..7233e8454d2 100644 --- a/tests/foreman/virtwho/cli/test_nutanix.py +++ b/tests/foreman/virtwho/cli/test_nutanix.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/cli/test_nutanix_sca.py b/tests/foreman/virtwho/cli/test_nutanix_sca.py index 360aac0f676..a3ed65953a3 100644 --- a/tests/foreman/virtwho/cli/test_nutanix_sca.py +++ b/tests/foreman/virtwho/cli/test_nutanix_sca.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/ui/test_esx.py b/tests/foreman/virtwho/ui/test_esx.py index efd46e6deba..dc615e7b4a5 100644 --- a/tests/foreman/virtwho/ui/test_esx.py +++ b/tests/foreman/virtwho/ui/test_esx.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from datetime import datetime from fauxfactory import gen_string @@ -731,9 +732,7 
@@ def test_positive_remove_env_option( option = "env" config_id = get_configure_id(name) config_file = get_configure_file(config_id) - env_error = ( - f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" - ) + env_error = f"option {{'{option}'}} is not exist or not be enabled in {{'{config_file}'}}" with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) assert str(exc_info.value) == env_error diff --git a/tests/foreman/virtwho/ui/test_esx_sca.py b/tests/foreman/virtwho/ui/test_esx_sca.py index a4c45dd4960..118e7a0cfb0 100644 --- a/tests/foreman/virtwho/ui/test_esx_sca.py +++ b/tests/foreman/virtwho/ui/test_esx_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + from datetime import datetime from airgun.session import Session @@ -243,7 +244,12 @@ def test_positive_filtering_option( @pytest.mark.tier2 def test_positive_last_checkin_status( - self, module_sca_manifest_org, virtwho_config_ui, form_data_ui, org_session + self, + module_sca_manifest_org, + virtwho_config_ui, + form_data_ui, + org_session, + default_location, ): """Verify the Last Checkin status on Content Hosts Page. @@ -265,6 +271,7 @@ def test_positive_last_checkin_status( ) time_now = org_session.browser.get_client_datetime() assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + org_session.location.select(default_location.name) checkin_time = org_session.contenthost.search(hypervisor_name)[0]['Last Checkin'] # 10 mins margin to check the Last Checkin time assert ( @@ -306,9 +313,7 @@ def test_positive_remove_env_option( option = "env" config_id = get_configure_id(name) config_file = get_configure_file(config_id) - env_error = ( - f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" - ) + env_error = f"option {{'{option}'}} is not exist or not be enabled in {{'{config_file}'}}" with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) assert str(exc_info.value) == env_error diff --git a/tests/foreman/virtwho/ui/test_hyperv.py b/tests/foreman/virtwho/ui/test_hyperv.py index 8348540326f..2db519760fe 100644 --- a/tests/foreman/virtwho/ui/test_hyperv.py +++ b/tests/foreman/virtwho/ui/test_hyperv.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/ui/test_hyperv_sca.py b/tests/foreman/virtwho/ui/test_hyperv_sca.py index a5b560c99a1..5fcd47bdb88 100644 --- a/tests/foreman/virtwho/ui/test_hyperv_sca.py +++ b/tests/foreman/virtwho/ui/test_hyperv_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/ui/test_kubevirt.py b/tests/foreman/virtwho/ui/test_kubevirt.py index e12c4774fc8..0f8a01f3177 100644 --- a/tests/foreman/virtwho/ui/test_kubevirt.py +++ b/tests/foreman/virtwho/ui/test_kubevirt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/ui/test_kubevirt_sca.py b/tests/foreman/virtwho/ui/test_kubevirt_sca.py index a1554929866..14714df675d 100644 --- a/tests/foreman/virtwho/ui/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/ui/test_kubevirt_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/ui/test_libvirt.py 
b/tests/foreman/virtwho/ui/test_libvirt.py index a66bf1737bb..1d1ffaa0dcc 100644 --- a/tests/foreman/virtwho/ui/test_libvirt.py +++ b/tests/foreman/virtwho/ui/test_libvirt.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + import pytest from robottelo.config import settings diff --git a/tests/foreman/virtwho/ui/test_libvirt_sca.py b/tests/foreman/virtwho/ui/test_libvirt_sca.py index ffef902441e..66c73926ab8 100644 --- a/tests/foreman/virtwho/ui/test_libvirt_sca.py +++ b/tests/foreman/virtwho/ui/test_libvirt_sca.py @@ -9,6 +9,7 @@ :team: Phoenix-subscriptions """ + import pytest from robottelo.utils.virtwho import ( diff --git a/tests/foreman/virtwho/ui/test_nutanix.py b/tests/foreman/virtwho/ui/test_nutanix.py index 7ec05f191df..2b7295fd359 100644 --- a/tests/foreman/virtwho/ui/test_nutanix.py +++ b/tests/foreman/virtwho/ui/test_nutanix.py @@ -11,6 +11,7 @@ :CaseImportance: High """ + from fauxfactory import gen_string import pytest diff --git a/tests/foreman/virtwho/ui/test_nutanix_sca.py b/tests/foreman/virtwho/ui/test_nutanix_sca.py index bb2483195d1..b085fc55ec1 100644 --- a/tests/foreman/virtwho/ui/test_nutanix_sca.py +++ b/tests/foreman/virtwho/ui/test_nutanix_sca.py @@ -9,6 +9,7 @@ :Team: Phoenix-subscriptions """ + from fauxfactory import gen_string import pytest diff --git a/tests/robottelo/test_cli.py b/tests/robottelo/test_cli.py index debbc8c6e42..a2568f5a584 100644 --- a/tests/robottelo/test_cli.py +++ b/tests/robottelo/test_cli.py @@ -149,8 +149,8 @@ def test_add_operating_system(self, construct, execute): assert Base.command_sub != 'add-operatingsystem' assert execute.return_value == Base.add_operating_system(options) assert Base.command_sub == 'add-operatingsystem' - construct.called_once_with(options) - execute.called_once_with(construct.return_value) + construct.assert_called_once_with(options) + execute.assert_called_once_with(construct.return_value) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') @@ -159,8 +159,8 @@ def test_add_create_with_empty_result(self, construct, execute): execute.return_value = [] assert execute.return_value == Base.create() assert Base.command_sub == 'create' - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) @mock.patch('robottelo.cli.base.Base.info') @mock.patch('robottelo.cli.base.Base.execute') @@ -170,8 +170,8 @@ def test_add_create_with_result_dct_without_id(self, construct, execute, info): execute.return_value = [{'not_id': 'foo'}] assert execute.return_value == Base.create() assert Base.command_sub == 'create' - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) assert not info.called @mock.patch('robottelo.cli.base.Base.info') @@ -185,9 +185,9 @@ def test_add_create_with_result_dct_with_id_not_required_org(self, construct, ex Base.command_requires_org = False assert execute.return_value == Base.create() assert Base.command_sub == 'create' - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') - info.called_once_with({'id': 'foo'}) + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) + 
info.assert_called_once_with({'id': 'foo'}) @mock.patch('robottelo.cli.base.Base.info') @mock.patch('robottelo.cli.base.Base.execute') @@ -200,9 +200,9 @@ def test_add_create_with_result_dct_with_id_required_org(self, construct, execut Base.command_requires_org = True assert execute.return_value == Base.create({'organization-id': 'org-id'}) assert Base.command_sub == 'create' - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') - info.called_once_with({'id': 'foo', 'organization-id': 'org-id'}) + construct.assert_called_once_with({'organization-id': 'org-id'}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) + info.assert_called_once_with({'id': 'foo', 'organization-id': 'org-id'}) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') @@ -215,8 +215,8 @@ def test_add_create_with_result_dct_id_required_org_error(self, construct, execu with pytest.raises(CLIError): Base.create() assert Base.command_sub == 'create' - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) def assert_cmd_execution( self, construct, execute, base_method, cmd_sub, ignore_stderr=False, **base_method_kwargs @@ -224,8 +224,10 @@ def assert_cmd_execution( """Asssert Base class method successfully executed""" assert execute.return_value == base_method(**base_method_kwargs) assert cmd_sub == Base.command_sub - construct.called_once_with({}) - execute.called_once_with(construct.return_value, ignore_stderr=ignore_stderr) + construct.assert_called_once_with(base_method_kwargs.get('options')) + execute.assert_called_once_with( + construct.return_value, ignore_stderr=ignore_stderr, timeout=None + ) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') @@ -308,16 +310,36 @@ def test_info_requires_organization_id(self, _): # noqa: PT019 - not a fixture with pytest.raises(CLIError): Base.info() + def assert_alt_cmd_execution( + self, + construct, + execute, + base_method, + cmd_sub, + call_kwargs, + command_kwarg=True, + **base_method_kwargs, + ): + """Asssert Base class method successfully executed""" + assert execute.return_value == base_method(**base_method_kwargs) + assert cmd_sub == Base.command_sub + construct.assert_called_once_with(base_method_kwargs.get('options')) + if command_kwarg: + execute.assert_called_once_with(command=construct.return_value, **call_kwargs) + else: + execute.assert_called_once_with(construct.return_value, **call_kwargs) + @mock.patch('robottelo.cli.base.hammer.parse_info') @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def test_info_without_parsing_response(self, construct, execute, parse): """Check info method execution without parsing response""" - self.assert_cmd_execution( + self.assert_alt_cmd_execution( construct, execute, Base.info, 'info', + call_kwargs={'output_format': 'json', 'return_raw_response': None}, output_format='json', options={'organization-id': 1}, ) @@ -329,18 +351,15 @@ def test_info_without_parsing_response(self, construct, execute, parse): def test_info_parsing_response(self, construct, execute, parse): """Check info method execution parsing response""" parse.return_value = execute.return_value = 'some_response' - self.assert_cmd_execution( 
-            construct, execute, Base.info, 'info', options={'organization-id': 1}
+        self.assert_alt_cmd_execution(
+            construct,
+            execute,
+            Base.info,
+            'info',
+            call_kwargs={'output_format': None, 'return_raw_response': None},
+            options={'organization-id': 1},
         )
-        parse.called_once_with('some_response')
-
-    # @mock.patch('robottelo.cli.base.Base.command_requires_org')
-    # def test_list_requires_organization_id(self, _):
-    #     """Check list raises CLIError with organization-id is not present in
-    #     options
-    #     """
-    #     with pytest.raises(CLIError):
-    #         Base.list()
+        parse.assert_called_once_with('some_response')

     @mock.patch('robottelo.cli.base.Base.execute')
     @mock.patch('robottelo.cli.base.Base._construct_command')
@@ -348,8 +367,8 @@ def test_list_with_default_per_page(self, construct, execute):
         """Check list method set per_page as 1000 by default"""
         assert execute.return_value == Base.list(options={'organization-id': 1})
         assert Base.command_sub == 'list'
-        construct.called_once_with({'per-page': 1000})
-        execute.called_once_with(construct.return_value, output_format='csv')
+        construct.assert_called_once_with({'organization-id': 1, 'per-page': 10000})
+        execute.assert_called_once_with(construct.return_value, output_format='csv')

     @mock.patch('robottelo.cli.base.Base.execute')
     @mock.patch('robottelo.cli.base.Base._construct_command')
@@ -358,39 +377,80 @@ def test_list_without_per_page(self, construct, execute):
         list_with_per_page_false = partial(
             Base.list, per_page=False, options={'organization-id': 1}
         )
-        self.assert_cmd_execution(construct, execute, list_with_per_page_false, 'list')
+        self.assert_alt_cmd_execution(
+            construct,
+            execute,
+            list_with_per_page_false,
+            'list',
+            call_kwargs={'output_format': 'csv'},
+            command_kwarg=False,
+            options={'organization-id': 1},
+        )

     @mock.patch('robottelo.cli.base.Base.execute')
     @mock.patch('robottelo.cli.base.Base._construct_command')
     def test_puppet_classes(self, construct, execute):
         """Check puppet_classes method execution"""
-        self.assert_cmd_execution(construct, execute, Base.puppetclasses, 'puppet-classes')
+        self.assert_alt_cmd_execution(
+            construct,
+            execute,
+            Base.puppetclasses,
+            'puppet-classes',
+            call_kwargs={'output_format': 'csv'},
+            command_kwarg=False,
+        )

     @mock.patch('robottelo.cli.base.Base.execute')
     @mock.patch('robottelo.cli.base.Base._construct_command')
     def test_remove_operating_system(self, construct, execute):
         """Check remove_operating_system method execution"""
-        self.assert_cmd_execution(
-            construct, execute, Base.remove_operating_system, 'remove-operatingsystem'
+        self.assert_alt_cmd_execution(
+            construct,
+            execute,
+            Base.remove_operating_system,
+            'remove-operatingsystem',
+            call_kwargs={},
+            command_kwarg=False,
         )

     @mock.patch('robottelo.cli.base.Base.execute')
     @mock.patch('robottelo.cli.base.Base._construct_command')
     def test_sc_params(self, construct, execute):
         """Check sc_params method execution"""
-        self.assert_cmd_execution(construct, execute, Base.sc_params, 'sc-params')
+        self.assert_alt_cmd_execution(
+            construct,
+            execute,
+            Base.sc_params,
+            'sc-params',
+            call_kwargs={'output_format': 'csv'},
+            command_kwarg=False,
+        )

     @mock.patch('robottelo.cli.base.Base.execute')
     @mock.patch('robottelo.cli.base.Base._construct_command')
     def test_set_parameter(self, construct, execute):
         """Check set_parameter method execution"""
-        self.assert_cmd_execution(construct, execute, Base.set_parameter, 'set-parameter')
+        self.assert_alt_cmd_execution(
+            construct,
+            execute,
+            Base.set_parameter,
+            'set-parameter',
+            call_kwargs={},
+            command_kwarg=False,
+        )

     @mock.patch('robottelo.cli.base.Base.execute')
     @mock.patch('robottelo.cli.base.Base._construct_command')
     def test_update(self, construct, execute):
         """Check update method execution"""
-        self.assert_cmd_execution(construct, execute, Base.update, 'update')
+        self.assert_alt_cmd_execution(
+            construct,
+            execute,
+            Base.update,
+            'update',
+            call_kwargs={'output_format': 'csv', 'return_raw_response': None},
+            command_kwarg=False,
+        )


 class CLIErrorTests(unittest.TestCase):
diff --git a/tests/robottelo/test_cli_method_calls.py b/tests/robottelo/test_cli_method_calls.py
index 04d4c65e8cd..765a24d2044 100644
--- a/tests/robottelo/test_cli_method_calls.py
+++ b/tests/robottelo/test_cli_method_calls.py
@@ -40,8 +40,8 @@ def test_cli_org_method_called(mocker, command_sub):
     options = {'foo': 'bar'}
     assert execute.return_value == getattr(Org, command_sub.replace('-', '_'))(options)
     assert command_sub == Org.command_sub
-    assert construct.called_once_with(options)
-    assert execute.called_once_with(construct.return_value)
+    construct.assert_called_once_with(options)
+    execute.assert_called_once_with(construct.return_value)


 @pytest.mark.parametrize('command_sub', ['import-classes', 'refresh-features'])
@@ -54,11 +54,11 @@ def test_cli_proxy_method_called(mocker, command_sub):
     options = {'foo': 'bar'}
     assert execute.return_value == getattr(Proxy, command_sub.replace('-', '_'))(options)
     assert command_sub == Proxy.command_sub
-    assert construct.called_once_with(options)
-    assert execute.called_once_with(construct.return_value)
+    construct.assert_called_once_with(options)
+    execute.assert_called_once_with(construct.return_value)


-@pytest.mark.parametrize('command_sub', ['synchronize', 'remove-content', 'upload-content'])
+@pytest.mark.parametrize('command_sub', ['remove-content', 'upload-content'])
 def test_cli_repository_method_called(mocker, command_sub):
     """Check Repository methods are called and command_sub edited
     This is a parametrized test called by Pytest for each of Repository methods
@@ -68,8 +68,8 @@ def test_cli_repository_method_called(mocker, command_sub):
     options = {'foo': 'bar'}
     assert execute.return_value == getattr(Repository, command_sub.replace('-', '_'))(options)
     assert command_sub == Repository.command_sub
-    assert construct.called_once_with(options)
-    assert execute.called_once_with(construct.return_value)
+    construct.assert_called_once_with(options)
+    execute.assert_called_once_with(construct.return_value, output_format='csv', ignore_stderr=True)


 @pytest.mark.parametrize('command_sub', ['info', 'create'])
@@ -94,5 +94,5 @@ def test_cli_subscription_method_called(mocker, command_sub):
     options = {'foo': 'bar'}
     assert execute.return_value == getattr(Subscription, command_sub.replace('-', '_'))(options)
     assert command_sub == Subscription.command_sub
-    assert construct.called_once_with(options)
-    assert execute.called_once_with(construct.return_value)
+    construct.assert_called_once_with(options)
+    execute.assert_called_once_with(construct.return_value, ignore_stderr=True, timeout=None)
diff --git a/tests/robottelo/test_datafactory.py b/tests/robottelo/test_datafactory.py
index b93d401ed89..9d8406640c4 100644
--- a/tests/robottelo/test_datafactory.py
+++ b/tests/robottelo/test_datafactory.py
@@ -1,4 +1,5 @@
 """Tests for module ``robottelo.utils.datafactory``."""
+
 import itertools
 import random
 from unittest import mock
diff --git a/tests/robottelo/test_decorators.py b/tests/robottelo/test_decorators.py
index 6c5e60f568a..9a50461962a 100644
--- a/tests/robottelo/test_decorators.py
+++ b/tests/robottelo/test_decorators.py
@@ -1,4 +1,5 @@
 """Unit tests for :mod:`robottelo.utils.decorators`."""
+
 from unittest import mock

 import pytest
diff --git a/tests/robottelo/test_dependencies.py b/tests/robottelo/test_dependencies.py
index 950cbeebec4..d12372a64a8 100644
--- a/tests/robottelo/test_dependencies.py
+++ b/tests/robottelo/test_dependencies.py
@@ -1,4 +1,5 @@
 """Test important behavior in robottelo's direct dependencies"""
+
 import contextlib

diff --git a/tests/robottelo/test_func_locker.py b/tests/robottelo/test_func_locker.py
index d0da97431e0..de2ef29aa33 100644
--- a/tests/robottelo/test_func_locker.py
+++ b/tests/robottelo/test_func_locker.py
@@ -96,8 +96,9 @@ def simple_recursive_locking_function():
     """try to trigger the same lock from the same process, an exception should
     be expected
     """
-    with func_locker.locking_function(simple_locked_function), func_locker.locking_function(
-        simple_locked_function
+    with (
+        func_locker.locking_function(simple_locked_function),
+        func_locker.locking_function(simple_locked_function),
     ):
         pass
     return 'I should not be reached'
@@ -125,9 +126,10 @@ def simple_function_to_lock():
 def simple_with_locking_function(index=None):
     global counter_file
     time.sleep(0.05)
-    with func_locker.locking_function(simple_locked_function), open(
-        _get_function_lock_path('simple_locked_function')
-    ) as rf:
+    with (
+        func_locker.locking_function(simple_locked_function),
+        open(_get_function_lock_path('simple_locked_function')) as rf,
+    ):
         content = rf.read()

     if index is not None:
@@ -234,9 +236,10 @@ def test_locker_file_location_when_in_class(self):
             content = ''
         assert str(os.getpid()) != content
-        with func_locker.locking_function(SimpleClass.simple_function_to_lock), open(
-            file_path
-        ) as rf:
+        with (
+            func_locker.locking_function(SimpleClass.simple_function_to_lock),
+            open(file_path) as rf,
+        ):
             content = rf.read()

         assert str(os.getpid()) == content
@@ -249,9 +252,10 @@ def test_locker_file_location_when_in_class(self):
             content = ''
         assert str(os.getpid()) != content
-        with func_locker.locking_function(SimpleClass.simple_function_to_lock_cls), open(
-            file_path
-        ) as rf:
+        with (
+            func_locker.locking_function(SimpleClass.simple_function_to_lock_cls),
+            open(file_path) as rf,
+        ):
             content = rf.read()

         assert str(os.getpid()) == content
@@ -296,9 +300,10 @@ def test_locker_file_location_when_in_class(self):
         else:
             content = ''
         assert str(os.getpid()) != content
-        with func_locker.locking_function(SimpleClass.SubClass.simple_function_to_lock_cls), open(
-            file_path
-        ) as rf:
+        with (
+            func_locker.locking_function(SimpleClass.SubClass.simple_function_to_lock_cls),
+            open(file_path) as rf,
+        ):
             content = rf.read()

         assert str(os.getpid()) == content
@@ -410,7 +415,8 @@ def test_scoped_with_locking(self):
         assert os.path.exists(lock_file_path)

     def test_negative_with_locking_not_locked(self):
-        with pytest.raises(
-            func_locker.FunctionLockerError, match=r'.*Cannot ensure locking.*'
-        ), func_locker.locking_function(simple_function_not_locked):
+        with (
+            pytest.raises(func_locker.FunctionLockerError, match=r'.*Cannot ensure locking.*'),
+            func_locker.locking_function(simple_function_not_locked),
+        ):
             pass
diff --git a/tests/robottelo/test_hammer.py b/tests/robottelo/test_hammer.py
index 0a887a4c0bc..fce548fa0d6 100644
--- a/tests/robottelo/test_hammer.py
+++ b/tests/robottelo/test_hammer.py
@@ -1,4 +1,5 @@
 """Tests for Robottelo's hammer helpers"""
+
 from robottelo.cli import hammer

diff --git a/tests/robottelo/test_helpers.py b/tests/robottelo/test_helpers.py
index 0b39bb54b9e..b73fb78af7d 100644
--- a/tests/robottelo/test_helpers.py
+++ b/tests/robottelo/test_helpers.py
@@ -1,4 +1,5 @@
 """Tests for module ``robottelo.helpers``."""
+
 import pytest

 from robottelo.utils import slugify_component, validate_ssh_pub_key
diff --git a/tests/robottelo/test_issue_handlers.py b/tests/robottelo/test_issue_handlers.py
index 4da0b6df011..430cf18644b 100644
--- a/tests/robottelo/test_issue_handlers.py
+++ b/tests/robottelo/test_issue_handlers.py
@@ -342,10 +342,16 @@ def test_bz_should_not_deselect(self):
     @pytest.mark.parametrize('issue', ["BZ123456", "XX:123456", "KK:89456", "123456", 999999])
     def test_invalid_handler(self, issue):
         """Assert is_open w/ invalid handlers raise AttributeError"""
-        issue_deselect = should_deselect(issue)
-        with pytest.raises(AttributeError):
-            is_open(issue)
-        assert issue_deselect is None
+        if issue == 'BZ123456':
+            with pytest.raises(KeyError):
+                should_deselect(issue)
+            with pytest.raises(KeyError):
+                is_open(issue)
+        else:
+            issue_deselect = should_deselect(issue)
+            with pytest.raises(AttributeError):
+                is_open(issue)
+            assert issue_deselect is None

     def test_bz_cache(self, request):
         """Assert basic behavior of the --bz-cache pytest option"""
@@ -359,7 +365,6 @@ def _remove_file():
             os.remove(DEFAULT_BZ_CACHE_FILE)

         try:
-
             subprocess.run(
                 [sys.executable, '-m', 'pytest', '--collect-only', 'tests/robottelo'], check=True
             )
@@ -402,7 +407,11 @@ def test_add_workaround():
     add_workaround(data, matches, 'test', foo='bar')

     add_workaround(
-        data, matches, 'test', validation=lambda *a, **k: False, zaz='traz'  # Should not be added
+        data,
+        matches,
+        'test',
+        validation=lambda *a, **k: False,
+        zaz='traz',  # Should not be added
     )

     for match in matches:
diff --git a/tests/robottelo/test_ssh.py b/tests/robottelo/test_ssh.py
index 6167a0292be..28012695d96 100644
--- a/tests/robottelo/test_ssh.py
+++ b/tests/robottelo/test_ssh.py
@@ -1,4 +1,5 @@
 """Tests for module ``robottelo.utils.ssh``."""
+
 from unittest import mock

 from robottelo import ssh
diff --git a/tests/upgrades/conftest.py b/tests/upgrades/conftest.py
index 1853efaf073..d527a854905 100644
--- a/tests/upgrades/conftest.py
+++ b/tests/upgrades/conftest.py
@@ -81,6 +81,7 @@ def test_capsule_post_upgrade_skipped(pre_upgrade_data):
     # in post_upgrade scenario, test results should be
     # 2 passed, 6 deselected
 """
+
 import datetime
 import functools
 import json
diff --git a/tests/upgrades/test_activation_key.py b/tests/upgrades/test_activation_key.py
index 11d37ccdab0..540c5b52b58 100644
--- a/tests/upgrades/test_activation_key.py
+++ b/tests/upgrades/test_activation_key.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest
 from requests.exceptions import HTTPError

diff --git a/tests/upgrades/test_bookmarks.py b/tests/upgrades/test_bookmarks.py
index b0d04cfa985..f9f0986aec0 100644
--- a/tests/upgrades/test_bookmarks.py
+++ b/tests/upgrades/test_bookmarks.py
@@ -11,9 +11,10 @@
 :CaseImportance: High

 """
+
 import pytest

-from robottelo.constants import BOOKMARK_ENTITIES
+from robottelo.constants import BOOKMARK_ENTITIES_SELECTION


 class TestPublicDisableBookmark:
@@ -45,7 +46,7 @@ def test_pre_create_public_disable_bookmark(self, request, target_sat):

         :CaseImportance: Critical
         """
-        for entity in BOOKMARK_ENTITIES:
+        for entity in BOOKMARK_ENTITIES_SELECTION:
             book_mark_name = entity["name"] + request.node.name
             bm = target_sat.api.Bookmark(
                 controller=entity['controller'],
@@ -77,7 +78,7 @@ def test_post_create_public_disable_bookmark(self, dependent_scenario_name, targ
         :CaseImportance: Critical
         """
         pre_test_name = dependent_scenario_name
-        for entity in BOOKMARK_ENTITIES:
+        for entity in BOOKMARK_ENTITIES_SELECTION:
             book_mark_name = entity["name"] + pre_test_name
             bm = target_sat.api.Bookmark().search(query={'search': f'name="{book_mark_name}"'})[0]
             assert bm.controller == entity['controller']
@@ -115,7 +116,7 @@ def test_pre_create_public_enable_bookmark(self, request, target_sat):

         :customerscenario: true
         """
-        for entity in BOOKMARK_ENTITIES:
+        for entity in BOOKMARK_ENTITIES_SELECTION:
             book_mark_name = entity["name"] + request.node.name
             bm = target_sat.api.Bookmark(
                 controller=entity['controller'],
@@ -145,7 +146,7 @@ def test_post_create_public_enable_bookmark(self, dependent_scenario_name, targe
         :CaseImportance: Critical
         """
         pre_test_name = dependent_scenario_name
-        for entity in BOOKMARK_ENTITIES:
+        for entity in BOOKMARK_ENTITIES_SELECTION:
             book_mark_name = entity["name"] + pre_test_name
             bm = target_sat.api.Bookmark().search(query={'search': f'name="{book_mark_name}"'})[0]
             assert bm.controller == entity['controller']
diff --git a/tests/upgrades/test_capsule.py b/tests/upgrades/test_capsule.py
index c7cf63e9977..e106c9a860e 100644
--- a/tests/upgrades/test_capsule.py
+++ b/tests/upgrades/test_capsule.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import os

 import pytest
diff --git a/tests/upgrades/test_classparameter.py b/tests/upgrades/test_classparameter.py
index b7f8d1c6ed1..f0fd73f78ad 100644
--- a/tests/upgrades/test_classparameter.py
+++ b/tests/upgrades/test_classparameter.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import json

 import pytest
diff --git a/tests/upgrades/test_client.py b/tests/upgrades/test_client.py
index 8ca57aa4586..4313675a6c0 100644
--- a/tests/upgrades/test_client.py
+++ b/tests/upgrades/test_client.py
@@ -14,6 +14,7 @@
 :CaseImportance: High

 """
+
 import pytest

 from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME, FAKE_4_CUSTOM_PACKAGE_NAME
diff --git a/tests/upgrades/test_contentview.py b/tests/upgrades/test_contentview.py
index 2b581297b1e..537ea003e03 100644
--- a/tests/upgrades/test_contentview.py
+++ b/tests/upgrades/test_contentview.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_alpha
 import pytest

diff --git a/tests/upgrades/test_discovery.py b/tests/upgrades/test_discovery.py
index c85e02e01f4..1cf7404fb19 100644
--- a/tests/upgrades/test_discovery.py
+++ b/tests/upgrades/test_discovery.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import re

 from packaging.version import Version
diff --git a/tests/upgrades/test_errata.py b/tests/upgrades/test_errata.py
index ed2407d67c6..5f84af8f8b5 100644
--- a/tests/upgrades/test_errata.py
+++ b/tests/upgrades/test_errata.py
@@ -11,6 +11,7 @@
 :CaseImportance: Critical

 """
+
 import pytest
 from wait_for import wait_for

diff --git a/tests/upgrades/test_host.py b/tests/upgrades/test_host.py
index b60585d63bd..b5db455ab33 100644
--- a/tests/upgrades/test_host.py
+++ b/tests/upgrades/test_host.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_string
 import pytest

diff --git a/tests/upgrades/test_hostcontent.py b/tests/upgrades/test_hostcontent.py
index 0a6a60e2a9e..b8faff77feb 100644
--- a/tests/upgrades/test_hostcontent.py
+++ b/tests/upgrades/test_hostcontent.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest


diff --git a/tests/upgrades/test_hostgroup.py b/tests/upgrades/test_hostgroup.py
index a6d67a106db..a61e854bc5c 100644
--- a/tests/upgrades/test_hostgroup.py
+++ b/tests/upgrades/test_hostgroup.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_string
 import pytest

diff --git a/tests/upgrades/test_performance_tuning.py b/tests/upgrades/test_performance_tuning.py
index 33237e8f561..28a30aeed95 100644
--- a/tests/upgrades/test_performance_tuning.py
+++ b/tests/upgrades/test_performance_tuning.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import filecmp

 import pytest
diff --git a/tests/upgrades/test_provisioningtemplate.py b/tests/upgrades/test_provisioningtemplate.py
index 7c603c30a53..cc686e3d22f 100644
--- a/tests/upgrades/test_provisioningtemplate.py
+++ b/tests/upgrades/test_provisioningtemplate.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_string
 import pytest

diff --git a/tests/upgrades/test_puppet.py b/tests/upgrades/test_puppet.py
index b0c96394cc8..9c8f53fbbe1 100644
--- a/tests/upgrades/test_puppet.py
+++ b/tests/upgrades/test_puppet.py
@@ -11,6 +11,7 @@
 :CaseImportance: Medium

 """
+
 import pytest


diff --git a/tests/upgrades/test_remoteexecution.py b/tests/upgrades/test_remoteexecution.py
index 62d469e5bc9..18019dd3f73 100644
--- a/tests/upgrades/test_remoteexecution.py
+++ b/tests/upgrades/test_remoteexecution.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest


diff --git a/tests/upgrades/test_repository.py b/tests/upgrades/test_repository.py
index 31233d820c6..9b4de62f02f 100644
--- a/tests/upgrades/test_repository.py
+++ b/tests/upgrades/test_repository.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest

 from robottelo.config import settings
diff --git a/tests/upgrades/test_role.py b/tests/upgrades/test_role.py
index c2ee4b25d20..1af836c5bcb 100644
--- a/tests/upgrades/test_role.py
+++ b/tests/upgrades/test_role.py
@@ -2,7 +2,7 @@

 :Requirement: UpgradedSatellite

-:CaseAutomation: NotAutomated
+:CaseAutomation: Automated

 :CaseComponent: UsersRoles

@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest


@@ -31,6 +32,8 @@ class TestOverriddenFilter:

         1. The Filter should be have set override flag postupgrade
         2. The locations and organizations of filter should be unchanged postupgrade
+
+    :CaseAutomation: NotAutomated
     """

     @pytest.mark.pre_upgrade
@@ -82,6 +85,8 @@ class TestBuiltInRolesLocked:

         1. Builtin roles of satellite should be locked and non-editable
         2. Built in roles of satellite should be allowed to clone
+
+    :CaseAutomation: NotAutomated
     """

     @pytest.mark.post_upgrade
@@ -117,6 +122,8 @@ class TestNewOrganizationAdminRole:
            non-editable
         4. Organization Admin role of satellite should be allowed to clone
         5. Taxonomies should be assigned to cloned org admin role
+
+    :CaseAutomation: NotAutomated
     """

     @pytest.mark.post_upgrade
@@ -164,7 +171,6 @@ def test_pre_default_role_added_permission(self, target_sat):
         :steps: New permission is added to existing 'Default role'

         :expectedresults: Permission is added to existing 'Default role'.
-
         """
         default_role = target_sat.api.Role().search(query={'search': 'name="Default role"'})[0]
         subnet_filter = target_sat.api.Filter(
@@ -219,7 +225,6 @@ def test_pre_default_role_added_permission_with_filter(self, target_sat):
         :expectedresults: Permission with filter is added to existing
             'Default role'

-
         """
         default_role = target_sat.api.Role().search(query={'search': 'name="Default role"'})[0]
         domain_filter = target_sat.api.Filter(
diff --git a/tests/upgrades/test_satellite_maintain.py b/tests/upgrades/test_satellite_maintain.py
index 03abb569e94..128a6dd64b2 100644
--- a/tests/upgrades/test_satellite_maintain.py
+++ b/tests/upgrades/test_satellite_maintain.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import re

 import pytest
diff --git a/tests/upgrades/test_satellitesync.py b/tests/upgrades/test_satellitesync.py
index ad9356d2f97..627f389ca1a 100644
--- a/tests/upgrades/test_satellitesync.py
+++ b/tests/upgrades/test_satellitesync.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest

 from robottelo.constants import PULP_EXPORT_DIR
diff --git a/tests/upgrades/test_subnet.py b/tests/upgrades/test_subnet.py
index 0fbc2f38277..220b9316c79 100644
--- a/tests/upgrades/test_subnet.py
+++ b/tests/upgrades/test_subnet.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest


diff --git a/tests/upgrades/test_subscription.py b/tests/upgrades/test_subscription.py
index b1dd84597a1..3964dcfa6b5 100644
--- a/tests/upgrades/test_subscription.py
+++ b/tests/upgrades/test_subscription.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from manifester import Manifester
 import pytest

diff --git a/tests/upgrades/test_syncplan.py b/tests/upgrades/test_syncplan.py
index 5939c8e16dc..0a9f1e8f787 100644
--- a/tests/upgrades/test_syncplan.py
+++ b/tests/upgrades/test_syncplan.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_choice
 import pytest

diff --git a/tests/upgrades/test_user.py b/tests/upgrades/test_user.py
index b21da9b5ba1..bdcc3549739 100644
--- a/tests/upgrades/test_user.py
+++ b/tests/upgrades/test_user.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 import pytest


diff --git a/tests/upgrades/test_usergroup.py b/tests/upgrades/test_usergroup.py
index 11ac95e2af8..36687f048db 100644
--- a/tests/upgrades/test_usergroup.py
+++ b/tests/upgrades/test_usergroup.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_string
 import pytest

diff --git a/tests/upgrades/test_virtwho.py b/tests/upgrades/test_virtwho.py
index 3691e3a8d70..0e9e15369d9 100644
--- a/tests/upgrades/test_virtwho.py
+++ b/tests/upgrades/test_virtwho.py
@@ -11,6 +11,7 @@
 :CaseImportance: High

 """
+
 from fauxfactory import gen_string
 import pytest

@@ -37,7 +38,7 @@ def form_data(target_sat):
         'satellite_url': target_sat.hostname,
         'hypervisor_username': esx.hypervisor_username,
         'hypervisor_password': esx.hypervisor_password,
-        'name': 'preupgrade_virt_who',
+        'name': f'preupgrade_virt_who_{gen_string("alpha")}',
     }


@@ -57,7 +58,7 @@ class TestScenarioPositiveVirtWho:

     @pytest.mark.pre_upgrade
     def test_pre_create_virt_who_configuration(
-        self, form_data, save_test_data, target_sat, function_entitlement_manifest
+        self, form_data, save_test_data, target_sat, module_sca_manifest_org
     ):
         """Create and deploy virt-who configuration.

@@ -69,57 +70,28 @@ def test_pre_create_virt_who_configuration(
         1. Config can be created and deployed by command.
         2. No error msg in /var/log/rhsm/rhsm.log.
         3. Report is sent to satellite.
-        4. Virtual sku can be generated and attached.
         """
-        org = target_sat.api.Organization(name=ORG_DATA['name']).create()
-        target_sat.api.Location(organization=[org]).create()
-        org.sca_disable()
-        target_sat.upload_manifest(org.id, function_entitlement_manifest.content)
-        form_data.update({'organization_id': org.id})
+        form_data.update({'organization_id': module_sca_manifest_org.id})
         vhd = target_sat.api.VirtWhoConfig(**form_data).create()
         assert vhd.status == 'unknown'
-        command = get_configure_command(vhd.id, org=org.name)
+        command = get_configure_command(vhd.id, org=module_sca_manifest_org.name)
         hypervisor_name, guest_name = deploy_configure_by_command(
-            command, form_data['hypervisor_type'], debug=True, org=org.label
+            command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label
         )
         virt_who_instance = (
-            target_sat.api.VirtWhoConfig(organization_id=org.id)
+            target_sat.api.VirtWhoConfig(organization_id=module_sca_manifest_org.id)
             .search(query={'search': f'name={form_data["name"]}'})[0]
             .status
         )
         assert virt_who_instance == 'ok'
-        hosts = [
-            (hypervisor_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL'),
-            (guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'),
-        ]
-        for hostname, sku in hosts:
-            host = target_sat.cli.Host.list({'search': hostname})[0]
-            subscriptions = target_sat.cli.Subscription.list(
-                {'organization-id': org.id, 'search': sku}
-            )
-            vdc_id = subscriptions[0]['id']
-            if 'type=STACK_DERIVED' in sku:
-                for item in subscriptions:
-                    if hypervisor_name.lower() in item['type']:
-                        vdc_id = item['id']
-                        break
-            target_sat.api.HostSubscription(host=host['id']).add_subscriptions(
-                data={'subscriptions': [{'id': vdc_id, 'quantity': 'Automatic'}]}
-            )
-            result = (
-                target_sat.api.Host(organization=org.id)
-                .search(query={'search': hostname})[0]
-                .read_json()
-            )
-            assert result['subscription_status_label'] == 'Fully entitled'
-
         save_test_data(
             {
                 'hypervisor_name': hypervisor_name,
                 'guest_name': guest_name,
-                'org_id': org.id,
-                'org_name': org.name,
-                'org_label': org.label,
+                'org_id': module_sca_manifest_org.id,
+                'org_name': module_sca_manifest_org.name,
+                'org_label': module_sca_manifest_org.label,
+                'name': vhd.name,
             }
         )
@@ -146,15 +118,16 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar
         org_id = pre_upgrade_data.get('org_id')
         org_name = pre_upgrade_data.get('org_name')
         org_label = pre_upgrade_data.get('org_label')
+        name = pre_upgrade_data.get('name')

         # Post upgrade, Verify virt-who exists and has same status.
         vhd = target_sat.api.VirtWhoConfig(organization_id=org_id).search(
-            query={'search': f'name={form_data["name"]}'}
+            query={'search': f'name={name}'}
         )[0]
         if not is_open('BZ:1802395'):
             assert vhd.status == 'ok'
         # Verify virt-who status via CLI as we cannot check it via API now
-        vhd_cli = target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name']))
+        vhd_cli = target_sat.cli.VirtWhoConfig.exists(search=('name', name))
         assert (
             target_sat.cli.VirtWhoConfig.info({'id': vhd_cli['id']})['general-information'][
                 'status'
@@ -172,11 +145,11 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar
             .search(query={'search': hostname})[0]
             .read_json()
         )
-        assert result['subscription_status_label'] == 'Fully entitled'
+        assert result['subscription_status_label'] == 'Simple Content Access'

         # Verify the virt-who config-file exists.
         config_file = get_configure_file(vhd.id)
-        get_configure_option('hypervisor_id', config_file),
+        get_configure_option('hypervisor_id', config_file)

         # Verify Report is sent to satellite.
         command = get_configure_command(vhd.id, org=org_name)
@@ -185,7 +158,7 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar
         )
         virt_who_instance = (
             target_sat.api.VirtWhoConfig(organization_id=org_id)
-            .search(query={'search': f'name={form_data["name"]}'})[0]
+            .search(query={'search': f'name={name}'})[0]
             .status
         )
         assert virt_who_instance == 'ok'