From af3566d3af789ad1defa4fcf02c8b4afa748ed20 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Mon, 7 Oct 2024 17:43:20 +0200 Subject: [PATCH 01/33] fix: environment variables not being passed to MAPDL process (#3461) * fix: envvars ignoring current environment * fix: envvars ignoring current environment * feat: getting env vars in `launch_mapdl` * ci: auto fixes from pre-commit.com hooks. for more information, see https://pre-commit.ci * chore: adding changelog file 3461.fixed.md * fix: test --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3461.fixed.md | 1 + src/ansys/mapdl/core/launcher.py | 23 ++++++++++++++++------- tests/test_launcher.py | 3 ++- 3 files changed, 19 insertions(+), 8 deletions(-) create mode 100644 doc/changelog.d/3461.fixed.md diff --git a/doc/changelog.d/3461.fixed.md b/doc/changelog.d/3461.fixed.md new file mode 100644 index 0000000000..76f0aad061 --- /dev/null +++ b/doc/changelog.d/3461.fixed.md @@ -0,0 +1 @@ +fix: environment variables not being passed to MAPDL process \ No newline at end of file diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 349602c667..ceafbbf806 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -602,7 +602,7 @@ def launch_grpc( env_vars = update_env_vars(add_env_vars, replace_env_vars) LOG.info( - f"Running a local instance at port {port} the following command: '{command}'" + f"Running a local instance in {run_location} at port {port} the following command: '{command}'" ) LOG.debug("MAPDL starting in background.") @@ -681,6 +681,7 @@ def _check_server_is_alive(stdout_queue, run_location, timeout): empty_i = 0 terminal_output = "" + LOG.debug(f"Checking if MAPDL server is alive") while time.time() < (t0 + timeout): terminal_output += "\n".join(_get_std_output(std_queue=stdout_queue)).strip() @@ -700,6 +701,9 @@ def _check_server_is_alive(stdout_queue, run_location, timeout): break else: + LOG.debug( + f"MAPDL gRPC server didn't print any valid output:\n{terminal_output}" + ) raise MapdlDidNotStart("MAPDL failed to start the gRPC server") @@ -1768,6 +1772,9 @@ def launch_mapdl( f"The machine has {machine_cores} cores. PyMAPDL is asking for {nproc} cores." ) + # Setting env vars + env_vars = update_env_vars(add_env_vars, replace_env_vars) + start_parm.update( { "exec_file": exec_file, @@ -1811,8 +1818,7 @@ def launch_mapdl( port, actual_run_location, process = launch_grpc( port=port, - add_env_vars=add_env_vars, - replace_env_vars=replace_env_vars, + replace_env_vars=env_vars, **start_parm, ) @@ -1917,7 +1923,7 @@ def check_mode(mode, version): return mode -def update_env_vars(add_env_vars, replace_env_vars): +def update_env_vars(add_env_vars: dict, replace_env_vars: dict) -> dict: """ Update environment variables for the MAPDL process. @@ -1939,6 +1945,8 @@ def update_env_vars(add_env_vars, replace_env_vars): """ # Expanding/replacing env variables for the process. + envvars = os.environ.copy() + if add_env_vars and replace_env_vars: raise ValueError( "'add_env_vars' and 'replace_env_vars' are incompatible. Please provide only one." @@ -1950,9 +1958,8 @@ def update_env_vars(add_env_vars, replace_env_vars): "The variable 'add_env_vars' should be a dict with env vars." 
) - add_env_vars.update(os.environ) + envvars.update(add_env_vars) LOG.debug(f"Updating environment variables with: {add_env_vars}") - return add_env_vars elif replace_env_vars: if not isinstance(replace_env_vars, dict): @@ -1960,7 +1967,9 @@ def update_env_vars(add_env_vars, replace_env_vars): "The variable 'replace_env_vars' should be a dict with env vars." ) LOG.debug(f"Replacing environment variables with: {replace_env_vars}") - return replace_env_vars + envvars = replace_env_vars + + return envvars def _check_license_argument(license_type, additional_switches): diff --git a/tests/test_launcher.py b/tests/test_launcher.py index 5be0570c31..0e4bfd396b 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -256,7 +256,8 @@ def test_remove_temp_files_fail(tmpdir, mapdl): def test_env_injection(): - assert update_env_vars(None, None) is None + no_inject = update_env_vars(None, None) + assert no_inject == os.environ.copy() # return os.environ assert "myenvvar" in update_env_vars({"myenvvar": "True"}, None) From 05128b6a8b666de3f9ef17a45ac99f5169076879 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Mon, 7 Oct 2024 17:43:34 +0200 Subject: [PATCH 02/33] fix: exiting earlier to avoid exceptions from gRPC calls (#3463) * feat: adding logging and avoid recursively trying to exit even if we have already tried. * feat: adding logging and avoid recursively trying to exit even if we have already tried. * feat: avoid issuing gRPC calls when exiting. Added more logging while exiting. * chore: adding changelog file 3463.fixed.md * fix: test --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3463.fixed.md | 1 + src/ansys/mapdl/core/errors.py | 3 +++ src/ansys/mapdl/core/mapdl_grpc.py | 27 +++++++++++++++++++++++++-- 3 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 doc/changelog.d/3463.fixed.md diff --git a/doc/changelog.d/3463.fixed.md b/doc/changelog.d/3463.fixed.md new file mode 100644 index 0000000000..f7d688caa4 --- /dev/null +++ b/doc/changelog.d/3463.fixed.md @@ -0,0 +1 @@ +fix: exiting earlier to avoid exceptions from gRPC calls \ No newline at end of file diff --git a/src/ansys/mapdl/core/errors.py b/src/ansys/mapdl/core/errors.py index 3500984ac6..19a09e22ba 100644 --- a/src/ansys/mapdl/core/errors.py +++ b/src/ansys/mapdl/core/errors.py @@ -323,6 +323,7 @@ def wrapper(*args, **kwargs): except grpc.RpcError as error: mapdl = retrieve_mapdl_from_args(args) + mapdl._log.debug("A gRPC error has been detected.") i_attemps += 1 if i_attemps <= n_attempts: @@ -443,9 +444,11 @@ def handle_generic_grpc_error(error, func, args, kwargs, reason="", suggestion=" else: # Making sure we do not keep executing gRPC calls. mapdl._exited = True + mapdl._exiting = True # Must close unfinished processes mapdl._close_process() + mapdl._exiting = False raise MapdlExitedError(msg) diff --git a/src/ansys/mapdl/core/mapdl_grpc.py b/src/ansys/mapdl/core/mapdl_grpc.py index 632c38b80b..e6ec30ca5c 100644 --- a/src/ansys/mapdl/core/mapdl_grpc.py +++ b/src/ansys/mapdl/core/mapdl_grpc.py @@ -1061,14 +1061,19 @@ def exit(self, save=False, force=False, **kwargs): >>> mapdl.exit() """ # check if permitted to start (and hence exit) instances + self._log.debug( + f"Exiting MAPLD gRPC instance {self.ip}:{self.port} on '{self._path}'." 
+ ) if self._exited is None: + self._log.debug("'self._exited' is none.") return # Some edge cases the class object is not completely initialized but the __del__ method # is called when exiting python. So, early exit here instead an error in the following # self.directory command. # See issue #1796 elif self._exited: # Already exited. + self._log.debug("Already exited") return if save: @@ -1091,10 +1096,9 @@ def exit(self, save=False, force=False, **kwargs): return self._exiting = True - self._log.debug("Exiting MAPDL") if not kwargs.pop("fake_exit", False): - # This cannot should not be faked + # This cannot/should not be faked if self._local: mapdl_path = self.directory self._cache_pids() # Recache processes @@ -1152,6 +1156,10 @@ def _kill_server(self): a local process. """ + if self._exited: + self._log.debug("MAPDL server already exited") + return + try: self._log.debug("Killing MAPDL server") except ValueError: @@ -1233,6 +1241,7 @@ def _close_process(self, timeout=2): # pragma: no cover if self.is_alive: raise MapdlRuntimeError("MAPDL could not be exited.") else: + self._log.debug("All MAPDL processes exited") self._exited = True def _cache_pids(self): @@ -1243,6 +1252,7 @@ def _cache_pids(self): processes. """ + self._log.debug("Caching PIDs") self._pids = [] for filename in self.list_files(): @@ -1264,11 +1274,15 @@ def _cache_pids(self): try: parent = psutil.Process(parent_pid) except psutil.NoSuchProcess: + self._log.debug(f"Parent process does not exist.") return + children = parent.children(recursive=True) self._pids = [parent_pid] + [each.pid for each in children] + self._log.debug(f"Recaching PIDs: {self._pids}") + def _remove_lock_file(self, mapdl_path=None): """Removes the lock file. @@ -2625,10 +2639,16 @@ def is_alive(self) -> bool: if self._exited: self._log.debug("MAPDL instance is not alive because it is exited.") return False + if self.busy: self._log.debug("MAPDL instance is alive because it is busy.") return True + if self._exiting: + # It should be exiting so we should not issue gRPC calls + self._log.debug("MAPDL instance is expected to be exiting") + return False + try: check = bool(self._ctrl("VERSION")) if check: @@ -2642,6 +2662,9 @@ def is_alive(self) -> bool: return check except Exception as error: + if self._exited: + return False + self._log.debug( f"MAPDL instance is not alive because retrieving version failed with:\n{error}" ) From 57f66f14650f28fc7a673c79a5dc46e451815390 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Tue, 8 Oct 2024 13:20:18 +0200 Subject: [PATCH 03/33] ci: bypass team check if it is dependabot (#3472) * ci: bypass team check if it is dependabot * chore: adding changelog file 3472.maintenance.md --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- .github/workflows/ci.yml | 6 ++++-- doc/changelog.d/3472.maintenance.md | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 doc/changelog.d/3472.maintenance.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 543cc91b08..ae1ab4d60f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -376,6 +376,7 @@ jobs: - uses: tspascoal/get-user-teams-membership@v3 id: is_organization_member + if: ${{ github.actor != 'dependabot[bot]' }} with: username: ${{ steps.get_user.outputs.user }} organization: ansys @@ -385,7 +386,7 @@ jobs: - id: set-matrix env: extended_testing: ${{ github.event_name == 'schedule' || ( github.event_name == 'workflow_dispatch' && 
inputs.run_all_tests ) || ( github.event_name == 'push' && contains(github.ref, 'refs/tags') ) }} - auth_user: ${{ steps.is_organization_member.outputs.isTeamMember == 'true' }} + auth_user: ${{ steps.is_organization_member.outputs.isTeamMember == 'true' || github.actor == 'dependabot[bot]' }} run: .ci/build_matrix.sh build-test-remote: @@ -599,6 +600,7 @@ jobs: - uses: tspascoal/get-user-teams-membership@v3 id: is_organization_member + if: ${{ github.actor != 'dependabot[bot]' }} with: username: ${{ github.actor }} organization: ansys @@ -614,7 +616,7 @@ jobs: RUN_ALL_TEST: ${{ inputs.run_all_tests }} ON_PUSH: ${{ github.event_name == 'push' }} HAS_TAG: ${{ contains(github.ref, 'refs/tags') }} - auth_user: ${{ steps.is_organization_member.outputs.isTeamMember == 'true' }} + auth_user: ${{ steps.is_organization_member.outputs.isTeamMember == 'true' || github.actor == 'dependabot[bot]' }} run: .ci/build_matrix.sh build-test-ubuntu-local: diff --git a/doc/changelog.d/3472.maintenance.md b/doc/changelog.d/3472.maintenance.md new file mode 100644 index 0000000000..611a7b7bbc --- /dev/null +++ b/doc/changelog.d/3472.maintenance.md @@ -0,0 +1 @@ +ci: bypass team check if it is dependabot \ No newline at end of file From 4b64ad44d0f4ad930deb3ebd5f30a44cb5423df6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 11:28:34 +0000 Subject: [PATCH 04/33] ci: pre-commit autoupdate (#3471) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: pre-commit autoupdate updates: - [github.com/psf/black: 24.8.0 → 24.10.0](https://github.com/psf/black/compare/24.8.0...24.10.0) - [github.com/adamchainz/blacken-docs: 1.18.0 → 1.19.0](https://github.com/adamchainz/blacken-docs/compare/1.18.0...1.19.0) - [github.com/pre-commit/pre-commit-hooks: v4.6.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.6.0...v5.0.0) * chore: adding changelog file 3471.maintenance.md * feat: update .pre-commit-config.yaml --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: German <28149841+germa89@users.noreply.github.com> --- .pre-commit-config.yaml | 8 ++++---- doc/changelog.d/3471.maintenance.md | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 doc/changelog.d/3471.maintenance.md diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0c6089c167..06938f50fa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,17 +39,17 @@ repos: ) - repo: https://github.com/psf/black - rev: 24.8.0 # If version changes --> modify "blacken-docs" manually as well. + rev: 24.10.0 # If version changes --> modify "blacken-docs" manually as well. 
hooks: - id: black args: - --line-length=88 - repo: https://github.com/adamchainz/blacken-docs - rev: 1.18.0 + rev: 1.19.0 hooks: - id: blacken-docs - additional_dependencies: [black==24.8.0] + additional_dependencies: [black==24.10.0] - repo: https://github.com/PyCQA/flake8 rev: 7.1.1 @@ -71,7 +71,7 @@ repos: # exclude: "tests/" - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict - id: debug-statements diff --git a/doc/changelog.d/3471.maintenance.md b/doc/changelog.d/3471.maintenance.md new file mode 100644 index 0000000000..23264207b9 --- /dev/null +++ b/doc/changelog.d/3471.maintenance.md @@ -0,0 +1 @@ +ci: pre-commit autoupdate \ No newline at end of file From 6293abecf855c38b39150d61af11beafda87bb58 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:46:43 +0000 Subject: [PATCH 05/33] ci: bump the actions group with 2 updates (#3470) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: bump the actions group with 2 updates Bumps the actions group with 2 updates: [actions/checkout](https://github.com/actions/checkout) and [ansys/actions](https://github.com/ansys/actions). Updates `actions/checkout` from 4.2.0 to 4.2.1 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4.2.0...v4.2.1) Updates `ansys/actions` from 7 to 8 - [Release notes](https://github.com/ansys/actions/releases) - [Commits](https://github.com/ansys/actions/compare/v7...v8) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch dependency-group: actions - dependency-name: ansys/actions dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions ... Signed-off-by: dependabot[bot] * ci: pre-commit autoupdate (#3471) * ci: pre-commit autoupdate updates: - [github.com/psf/black: 24.8.0 → 24.10.0](https://github.com/psf/black/compare/24.8.0...24.10.0) - [github.com/adamchainz/blacken-docs: 1.18.0 → 1.19.0](https://github.com/adamchainz/blacken-docs/compare/1.18.0...1.19.0) - [github.com/pre-commit/pre-commit-hooks: v4.6.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.6.0...v5.0.0) * chore: adding changelog file 3471.maintenance.md * feat: update .pre-commit-config.yaml --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: German <28149841+germa89@users.noreply.github.com> * ci: upgrade to v8 * ci: bump the actions group with 2 updates Bumps the actions group with 2 updates: [actions/checkout](https://github.com/actions/checkout) and [ansys/actions](https://github.com/ansys/actions). 
Updates `actions/checkout` from 4.2.0 to 4.2.1 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4.2.0...v4.2.1) Updates `ansys/actions` from 7 to 8 - [Release notes](https://github.com/ansys/actions/releases) - [Commits](https://github.com/ansys/actions/compare/v7...v8) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch dependency-group: actions - dependency-name: ansys/actions dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions ... Signed-off-by: dependabot[bot] * ci: upgrade to v8 * Update .github/workflows/label.yml * chore: adding changelog file 3470.maintenance.md [dependabot-skip] * fix: indentation --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: German <28149841+germa89@users.noreply.github.com> --- .github/workflows/cache_cleaner.yml | 2 +- .github/workflows/ci.yml | 96 +++++---------------- .github/workflows/docker_clean_untagged.yml | 2 +- .github/workflows/label.yml | 6 +- .github/workflows/linkchecker.yml | 2 +- doc/changelog.d/3470.maintenance.md | 1 + 6 files changed, 31 insertions(+), 78 deletions(-) create mode 100644 doc/changelog.d/3470.maintenance.md diff --git a/.github/workflows/cache_cleaner.yml b/.github/workflows/cache_cleaner.yml index ed8c958460..4841e32f41 100644 --- a/.github/workflows/cache_cleaner.yml +++ b/.github/workflows/cache_cleaner.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out code - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 - name: Cleanup PR caches if: github.event_name != 'workflow_dispatch' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ae1ab4d60f..e85457424f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,19 +70,11 @@ jobs: contents: write pull-requests: write steps: - - uses: ansys/actions/doc-deploy-changelog@v7 + - uses: ansys/actions/doc-deploy-changelog@v8 with: token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - - - branch-name: - # Only if the event is a pull request and the branch name is not from the pre-commit-ci bot - if: github.event_name == 'pull_request' && github.head_ref != 'pre-commit-ci-update-config' - name: Check the name of the branch - runs-on: ubuntu-latest - steps: - - name: Check branch name - uses: ansys/actions/branch-name-style@v7 + bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }} + bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }} pull-request-name: @@ -91,7 +83,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check pull-request name - uses: ansys/actions/commit-style@v7 + uses: ansys/actions/check-pr-title@v8 with: token: ${{ secrets.GITHUB_TOKEN }} @@ -104,7 +96,7 @@ jobs: folder: ["doc", "examples"] steps: - name: "Ansys documentation style checks" - uses: ansys/actions/doc-style@v7 + uses: ansys/actions/doc-style@v8 with: token: ${{ secrets.GITHUB_TOKEN }} files: ${{ matrix.folder }} @@ -130,7 +122,7 @@ jobs: os: macos-latest steps: - name: "Build wheelhouse and perform smoke test" - uses: ansys/actions/build-wheelhouse@v7 + uses: ansys/actions/build-wheelhouse@v8 with: library-name: ${{ 
env.PACKAGE_NAME }} operating-system: ${{ matrix.os }} @@ -160,7 +152,7 @@ jobs: GRPC_ENABLE_FORK_SUPPORT: false # See #3434 steps: - name: "Install Git and checkout project" - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 - name: "Login in Github container registry" uses: docker/login-action@v3.3.0 @@ -357,7 +349,7 @@ jobs: steps: - name: "Install Git and checkout project" - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 - name: Get event type and user to check permissions. id: get_user @@ -412,7 +404,7 @@ jobs: steps: - name: "Install Git and checkout project" - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 - name: "Login in Github container registry" uses: docker/login-action@v3.3.0 @@ -596,7 +588,7 @@ jobs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: - name: "Install Git and checkout project" - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 - uses: tspascoal/get-user-teams-membership@v3 id: is_organization_member @@ -643,7 +635,7 @@ jobs: steps: - name: "Install Git and checkout project" - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 with: repository: ${{ github.event.pull_request.head.repo.full_name }} ref: ${{ github.event.pull_request.head.ref }} @@ -774,7 +766,7 @@ jobs: steps: - name: "Install Git and checkout project" - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 with: repository: ${{ github.event.pull_request.head.repo.full_name }} ref: ${{ github.event.pull_request.head.ref }} @@ -869,7 +861,7 @@ jobs: ON_LOCAL: TRUE steps: - - uses: actions/checkout@v4.2.0 + - uses: actions/checkout@v4.2.1 # Skipping because it is installed locally. # - name: Setup Python @@ -937,7 +929,7 @@ jobs: runs-on: ubuntu-latest steps: - name: "Build library source and wheel artifacts" - uses: ansys/actions/build-library@v7 + uses: ansys/actions/build-library@v8 with: library-name: ${{ env.PACKAGE_NAME }} python-version: ${{ env.MAIN_PYTHON_VERSION }} @@ -955,13 +947,13 @@ jobs: contents: write steps: - name: "Release to the public PyPI repository" - uses: ansys/actions/release-pypi-public@v7 + uses: ansys/actions/release-pypi-public@v8 with: library-name: ${{ env.PACKAGE_NAME }} use-trusted-publisher: true - name: "Release to GitHub" - uses: ansys/actions/release-github@v7 + uses: ansys/actions/release-github@v8 with: library-name: ${{ env.PACKAGE_NAME }} additional-artifacts: "minimum_requirements.txt" @@ -977,42 +969,13 @@ jobs: needs: [release] steps: - name: "Deploy the stable documentation" - # TODO: testing SEO improvements. This branch avoids creating a - # sitemap.xml pages in opposite to v5. - uses: ansys/actions/doc-deploy-stable@feat/seo-improvements + uses: ansys/actions/doc-deploy-stable@v8 with: cname: ${{ env.DOCUMENTATION_CNAME }} token: ${{ secrets.GITHUB_TOKEN }} render-last: '5' - - - doc-index-stable: - name: "Deploy stable docs index" - runs-on: ubuntu-latest - needs: upload-docs-release - steps: - - name: "Install Git and clone project" - uses: actions/checkout@v4.2.0 - - - name: "Install the package requirements" - run: pip install -e . 
- - - name: "Get the version to PyMeilisearch" - run: | - VERSION=$(python -c "from ansys.mapdl.core import __version__; print('.'.join(__version__.split('.')[:2]))") - VERSION_MEILI=$(python -c "from ansys.mapdl.core import __version__; print('-'.join(__version__.split('.')[:2]))") - echo "Calculated VERSION: $VERSION" - echo "Calculated VERSION_MEILI: $VERSION_MEILI" - echo "VERSION=$VERSION" >> $GITHUB_ENV - echo "VERSION_MEILI=$VERSION_MEILI" >> $GITHUB_ENV - - - name: "Deploy the latest documentation index" - uses: ansys/actions/doc-deploy-index@v7 - with: - cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }} - index-name: pymapdl-v${{ env.VERSION_MEILI }} - host-url: ${{ vars.MEILISEARCH_HOST_URL }} - api-key: ${{ env.MEILISEARCH_API_KEY }} + bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }} + bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }} upload-dev-docs: @@ -1022,25 +985,12 @@ jobs: needs: [docs-build] steps: - name: "Deploy the latest documentation" - # TODO: testing SEO improvements. This branch reuses the "index.html" from the stable version - uses: ansys/actions/doc-deploy-dev@feat/seo-improvements + uses: ansys/actions/doc-deploy-dev@v8 with: cname: ${{ env.DOCUMENTATION_CNAME }} token: ${{ secrets.GITHUB_TOKEN }} - - - doc-index-dev: - name: "Deploy dev index docs" - runs-on: ubuntu-latest - needs: upload-dev-docs - steps: - - name: "Deploy the latest documentation index" - uses: ansys/actions/doc-deploy-index@v7 - with: - cname: ${{ env.DOCUMENTATION_CNAME }}/version/dev - index-name: pymapdl-vdev - host-url: ${{ vars.MEILISEARCH_HOST_URL }} - api-key: ${{ env.MEILISEARCH_API_KEY }} + bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }} + bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }} notify: @@ -1068,7 +1018,7 @@ jobs: os: [ubuntu-latest, windows-latest] steps: - - uses: actions/checkout@v4.2.0 + - uses: actions/checkout@v4.2.1 - name: "Set up Julia" uses: julia-actions/setup-julia@v2 with: diff --git a/.github/workflows/docker_clean_untagged.yml b/.github/workflows/docker_clean_untagged.yml index d33bf114cc..19d1aa024a 100644 --- a/.github/workflows/docker_clean_untagged.yml +++ b/.github/workflows/docker_clean_untagged.yml @@ -22,7 +22,7 @@ jobs: steps: - name: "Perform versions cleanup - except certain tags" - uses: ansys/actions/hk-package-clean-untagged@v7 + uses: ansys/actions/hk-package-clean-untagged@v8 with: package-name: 'mapdl' token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml index 45e9c7f6aa..66348b684f 100644 --- a/.github/workflows/label.yml +++ b/.github/workflows/label.yml @@ -21,7 +21,7 @@ jobs: name: Syncer runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4.2.0 + - uses: actions/checkout@v4.2.1 - uses: micnncim/action-label-syncer@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -127,6 +127,8 @@ jobs: pull-requests: write runs-on: ubuntu-latest steps: - - uses: ansys/actions/doc-changelog@v7 + - uses: ansys/actions/doc-changelog@v8 with: + bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }} + bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/linkchecker.yml b/.github/workflows/linkchecker.yml index b49043b3da..285e0fe889 100644 --- a/.github/workflows/linkchecker.yml +++ b/.github/workflows/linkchecker.yml @@ -31,7 +31,7 @@ jobs: SHELLOPTS: "errexit:pipefail" steps: - name: "Install Git and checkout project" - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.1 - name: "Login 
in Github container registry" uses: docker/login-action@v3.3.0 diff --git a/doc/changelog.d/3470.maintenance.md b/doc/changelog.d/3470.maintenance.md new file mode 100644 index 0000000000..031d764761 --- /dev/null +++ b/doc/changelog.d/3470.maintenance.md @@ -0,0 +1 @@ +ci: bump the actions group with 2 updates \ No newline at end of file From e327e4df22e568c287bcd1059abc3825aba8dd2c Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Wed, 9 Oct 2024 17:49:43 +0200 Subject: [PATCH 06/33] refactor: removing deprecated arguments (#3473) * refactor: removing deprecated arguments * chore: adding changelog file 3473.added.md [dependabot-skip] * test: adapting tests * fix: removing traces from deprecated arguments. --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3473.added.md | 1 + src/ansys/mapdl/core/cli/start.py | 26 --------------- src/ansys/mapdl/core/launcher.py | 53 +----------------------------- src/ansys/mapdl/core/mapdl_grpc.py | 11 ------- src/ansys/mapdl/core/pool.py | 14 ++++---- tests/test_cli.py | 2 -- tests/test_launcher.py | 21 +++--------- tests/test_pool.py | 2 +- 8 files changed, 15 insertions(+), 115 deletions(-) create mode 100644 doc/changelog.d/3473.added.md diff --git a/doc/changelog.d/3473.added.md b/doc/changelog.d/3473.added.md new file mode 100644 index 0000000000..4cf3d59e82 --- /dev/null +++ b/doc/changelog.d/3473.added.md @@ -0,0 +1 @@ +refactor: removing deprecated arguments \ No newline at end of file diff --git a/src/ansys/mapdl/core/cli/start.py b/src/ansys/mapdl/core/cli/start.py index 7bd1ee8110..7c8332a96c 100644 --- a/src/ansys/mapdl/core/cli/start.py +++ b/src/ansys/mapdl/core/cli/start.py @@ -125,24 +125,12 @@ type=str, help="Argument not allowed in CLI. It will be ignored.", ) -@click.option( - "--remove_temp_files", - default=None, - type=str, - help="Argument not allowed in CLI. It will be ignored.", -) @click.option( "--remove_temp_dir_on_exit", default=False, type=bool, help="Argument not allowed in CLI. It will be ignored.", ) -@click.option( - "--verbose_mapdl", - default=None, - type=str, - help="Argument not allowed in CLI. It will be ignored.", -) @click.option( "--license_server_check", default=False, @@ -196,9 +184,7 @@ def start( ip: str, clear_on_connect: bool, # ignored log_apdl: bool, # ignored - remove_temp_files: bool, # ignored remove_temp_dir_on_exit: bool, # ignored - verbose_mapdl: bool, # ignored license_server_check: bool, # ignored license_type: str, print_com: bool, # ignored @@ -250,24 +236,12 @@ def start( + " The following argument is not allowed in CLI: 'log_apdl'.\nIgnoring argument." ) - if remove_temp_files: - click.echo( - click.style("Warn:", fg="yellow") - + " The following argument is not allowed in CLI: 'remove_temp_files'.\nIgnoring argument." - ) - if remove_temp_dir_on_exit: click.echo( click.style("Warn:", fg="yellow") + " The following argument is not allowed in CLI: 'remove_temp_dir_on_exit'.\nIgnoring argument." ) - if verbose_mapdl: - click.echo( - click.style("Warn:", fg="yellow") - + " The following argument is not allowed in CLI: 'verbose_mapdl'.\nIgnoring argument." 
- ) - if print_com: click.echo( click.style("Warn:", fg="yellow") diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index ceafbbf806..48015b37c5 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -292,7 +292,7 @@ def launch_grpc( verbose: Optional[bool] = None, add_env_vars: Optional[Dict[str, str]] = None, replace_env_vars: Optional[Dict[str, str]] = None, - **kwargs, # to keep compatibility with corba and console interface. + **kwargs, # to keep compatibility with console interface. ) -> Tuple[int, str, subprocess.Popen]: """Start MAPDL locally in gRPC mode. @@ -1084,9 +1084,7 @@ def launch_mapdl( ip: Optional[str] = None, clear_on_connect: bool = True, log_apdl: Optional[Union[bool, str]] = None, - remove_temp_files: Optional[bool] = None, remove_temp_dir_on_exit: bool = False, - verbose_mapdl: Optional[bool] = None, license_server_check: bool = True, license_type: Optional[bool] = None, print_com: bool = False, @@ -1206,16 +1204,6 @@ def launch_mapdl( PyMAPDL. This argument is the path of the output file (e.g. ``log_apdl='pymapdl_log.txt'``). By default this is disabled. - remove_temp_files : bool, optional - When ``run_location`` is ``None``, this launcher creates a new MAPDL - working directory within the user temporary directory, obtainable with - ``tempfile.gettempdir()``. When this parameter is - ``True``, this directory will be deleted when MAPDL is exited. Default - ``False``. - - .. deprecated:: 0.64.0 - Use argument ``remove_temp_dir_on_exit`` instead. - remove_temp_dir_on_exit : bool, optional When ``run_location`` is ``None``, this launcher creates a new MAPDL working directory within the user temporary directory, obtainable with @@ -1225,16 +1213,6 @@ def launch_mapdl( If you change the working directory, PyMAPDL does not delete the original working directory nor the new one. - verbose_mapdl : bool, optional - Enable printing of all output when launching and running - MAPDL. This should be used for debugging only as output can - be tracked within pymapdl. Default ``False``. - - .. deprecated:: v0.65.0 - The ``verbose_mapdl`` argument is deprecated and will be completely - removed in a future release. - Use a logger instead. See :ref:`api_logging` for more details. - license_server_check : bool, optional Check if the license server is available if MAPDL fails to start. Only available on ``mode='grpc'``. Defaults ``True``. @@ -1485,33 +1463,11 @@ def launch_mapdl( else: ON_SLURM = False - if remove_temp_files is not None: - warnings.warn( - "The ``remove_temp_files`` option is being deprecated. It is to be removed in PyMAPDL version 0.66.0.\n" - "Please use ``remove_temp_dir_on_exit`` instead.", - DeprecationWarning, - stacklevel=2, - ) - remove_temp_dir_on_exit = remove_temp_files - remove_temp_files = None - - if verbose_mapdl is not None: - raise DeprecationError( - "The ``verbose_mapdl`` argument is deprecated and will be completely removed in a future release. Use a logger instead. " - "See https://mapdl.docs.pyansys.com/version/stable/api/logging.html for more details." - ) - # These parameters are partially used for unit testing set_no_abort = kwargs.pop("set_no_abort", True) # Extract arguments: force_intel = kwargs.pop("force_intel", False) - broadcast = kwargs.pop("log_broadcast", False) - if broadcast: - raise ValueError( - "The CORBA interface has been deprecated from 0.67." - "Hence this argument is not valid." 
- ) use_vtk = kwargs.pop("use_vtk", None) just_launch = kwargs.pop("just_launch", None) on_pool = kwargs.pop("on_pool", False) @@ -1880,13 +1836,6 @@ def check_mode(mode, version): ) elif os.name == "posix": raise VersionError("gRPC mode requires MAPDL 2021R1 or newer.") - elif mode == "corba": - raise DeprecationError( - "The CORBA interface has been deprecated with the" - " v0.67 release. Please use the gRPC interface instead.\n" - "For more information visit: " - "https://mapdl.docs.pyansys.com/version/0.66/getting_started/versioning.html#corba-interface" - ) elif mode == "console": if os.name == "nt": diff --git a/src/ansys/mapdl/core/mapdl_grpc.py b/src/ansys/mapdl/core/mapdl_grpc.py index e6ec30ca5c..a83dfcc7c5 100644 --- a/src/ansys/mapdl/core/mapdl_grpc.py +++ b/src/ansys/mapdl/core/mapdl_grpc.py @@ -337,7 +337,6 @@ def __init__( cleanup_on_exit: bool = False, log_apdl: Optional[str] = None, set_no_abort: bool = True, - remove_temp_files: Optional[bool] = None, remove_temp_dir_on_exit: bool = False, print_com: bool = False, disable_run_at_connect: bool = False, @@ -346,16 +345,6 @@ def __init__( **start_parm, ): """Initialize connection to the mapdl server""" - if remove_temp_files is not None: # pragma: no cover - warn( - "The option ``remove_temp_files`` is being deprecated and it will be removed by PyMAPDL version 0.66.0.\n" - "Please use ``remove_temp_dir_on_exit`` instead.", - DeprecationWarning, - stacklevel=2, - ) - remove_temp_dir_on_exit = remove_temp_files - remove_temp_files = None - self._name: Optional[str] = None self._session_id_: Optional[str] = None self._checking_session_id_: bool = False diff --git a/src/ansys/mapdl/core/pool.py b/src/ansys/mapdl/core/pool.py index 16b8c94159..114b11cf03 100755 --- a/src/ansys/mapdl/core/pool.py +++ b/src/ansys/mapdl/core/pool.py @@ -127,12 +127,14 @@ class MapdlPool: restart_failed : bool, optional Restarts any failed instances in the pool. Defaults to ``True``. - remove_temp_files : bool, optional - This launcher creates a new MAPDL working directory for each instance - of MAPDL within the temporary user directory, obtainable with - ``tempfile.gettempdir()``, for MAPDL files. When this parameter is + remove_temp_dir_on_exit : bool, optional + When ``run_location`` is ``None``, this launcher creates a new MAPDL + working directory within the user temporary directory, obtainable with + ``tempfile.gettempdir()``. When this parameter is ``True``, this directory will be deleted when MAPDL is exited. Default ``False``. + If you change the working directory, PyMAPDL does not delete the original + working directory nor the new one. 
names : str, Callable, optional You can specify the names of the directories where the instances are @@ -206,7 +208,7 @@ def __init__( port: Union[int, List[int]] = MAPDL_DEFAULT_PORT, progress_bar: bool = DEFAULT_PROGRESS_BAR, restart_failed: bool = True, - remove_temp_files: bool = True, + remove_temp_dir_on_exit: bool = True, names: Optional[str] = None, override=True, start_instance: bool = None, @@ -225,7 +227,7 @@ def __init__( run_location = create_temp_dir() self._root_dir: str = run_location - kwargs["remove_temp_files"] = remove_temp_files + kwargs["remove_temp_dir_on_exit"] = remove_temp_dir_on_exit kwargs["mode"] = "grpc" self._spawn_kwargs: Dict[str, Any] = kwargs self._spawning_i: int = 0 diff --git a/tests/test_cli.py b/tests/test_cli.py index 2925bc1804..09ae9dbb40 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -94,9 +94,7 @@ def test_launch_mapdl_cli_config(run_cli): "start_instance", "clear_on_connect", "log_apdl", - "remove_temp_files", "remove_temp_dir_on_exit", - "verbose_mapdl", "print_com", "add_env_vars", "replace_env_vars", diff --git a/tests/test_launcher.py b/tests/test_launcher.py index 0e4bfd396b..86e6dc8b54 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -31,7 +31,6 @@ from ansys.mapdl import core as pymapdl from ansys.mapdl.core.errors import ( - DeprecationError, LicenseServerConnectionError, NotEnoughResources, PortAlreadyInUseByAnMAPDLInstance, @@ -46,7 +45,6 @@ _validate_MPI, _verify_version, get_start_instance, - launch_grpc, launch_mapdl, update_env_vars, ) @@ -212,11 +210,11 @@ def test_license_type_dummy(mapdl): @requires("local") @requires("nostudent") -def test_remove_temp_files(mapdl): +def test_remove_temp_dir_on_exit(mapdl): """Ensure the working directory is removed when run_location is not set.""" mapdl_ = launch_mapdl( port=mapdl.port + 1, - remove_temp_files=True, + remove_temp_dir_on_exit=True, start_timeout=start_timeout, additional_switches=QUICK_LAUNCH_SWITCHES, ) @@ -235,11 +233,11 @@ def test_remove_temp_files(mapdl): @requires("local") @requires("nostudent") -def test_remove_temp_files_fail(tmpdir, mapdl): +def test_remove_temp_dir_on_exit_fail(tmpdir, mapdl): """Ensure the working directory is not removed when the cwd is changed.""" mapdl_ = launch_mapdl( port=mapdl.port + 1, - remove_temp_files=True, + remove_temp_dir_on_exit=True, start_timeout=start_timeout, additional_switches=QUICK_LAUNCH_SWITCHES, ) @@ -496,17 +494,6 @@ def test_fail_channel_ip(): launch_mapdl(channel="something", ip="something") -def test_deprecate_verbose(): - with pytest.raises(DeprecationError): - launch_mapdl(verbose_mapdl=True) - - with pytest.raises(ValueError): - launch_mapdl(verbose=True) - - with pytest.raises(DeprecationError): - launch_grpc(verbose=True) - - @pytest.mark.parametrize( "set_env_var_context,validation", ( diff --git a/tests/test_pool.py b/tests/test_pool.py index 4a17ed0fa0..a42ffc58f4 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -123,7 +123,7 @@ def pool_creator(self, tmpdir_factory): # check it's been cleaned up if mapdl_pool[0] is not None: pth = mapdl_pool[0].directory - if mapdl_pool._spawn_kwargs["remove_temp_files"]: + if mapdl_pool._spawn_kwargs["remove_temp_dir_on_exit"]: assert not list(Path(pth).rglob("*.page*")) @pytest.fixture From 3f4433350979f7eb71789370cf3d85934bd924d0 Mon Sep 17 00:00:00 2001 From: Camille <78221213+clatapie@users.noreply.github.com> Date: Thu, 10 Oct 2024 10:33:24 +0200 Subject: [PATCH 07/33] docs: fix cheat sheet rendering (#3469) * fix: cheat sheet 
rendering * chore: adding changelog file 3469.documentation.md --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3469.documentation.md | 1 + doc/source/cheat_sheet/cheat_sheet.qmd | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3469.documentation.md diff --git a/doc/changelog.d/3469.documentation.md b/doc/changelog.d/3469.documentation.md new file mode 100644 index 0000000000..4df69c6b1c --- /dev/null +++ b/doc/changelog.d/3469.documentation.md @@ -0,0 +1 @@ +docs: fix cheat sheet rendering \ No newline at end of file diff --git a/doc/source/cheat_sheet/cheat_sheet.qmd b/doc/source/cheat_sheet/cheat_sheet.qmd index f77a8e257c..b546c81ea0 100644 --- a/doc/source/cheat_sheet/cheat_sheet.qmd +++ b/doc/source/cheat_sheet/cheat_sheet.qmd @@ -77,7 +77,7 @@ mapdl_pool.exit() # PyMAPDL CLI Access MAPDL instances through CLI -```{console} +```{python} #| eval: false # Start an MAPDL instance at port ``50051`` pymapdl start --port 50051 From 939e8adc75f4b38daad8d19b5ba5ba4dd9e84aa0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:11:20 +0200 Subject: [PATCH 08/33] build: bump the documentation group with 3 updates (#3478) * build: bump the documentation group with 3 updates Bumps the documentation group with 3 updates: [pypandoc](https://github.com/JessicaTegner/pypandoc), [sphinx-gallery](https://github.com/sphinx-gallery/sphinx-gallery) and [sphinx](https://github.com/sphinx-doc/sphinx). Updates `pypandoc` from 1.13 to 1.14 - [Release notes](https://github.com/JessicaTegner/pypandoc/releases) - [Changelog](https://github.com/JessicaTegner/pypandoc/blob/master/release.md) - [Commits](https://github.com/JessicaTegner/pypandoc/compare/v1.13...v1.14) Updates `sphinx-gallery` from 0.17.1 to 0.18.0 - [Release notes](https://github.com/sphinx-gallery/sphinx-gallery/releases) - [Changelog](https://github.com/sphinx-gallery/sphinx-gallery/blob/master/.github_changelog_generator) - [Commits](https://github.com/sphinx-gallery/sphinx-gallery/compare/v0.17.1...v0.18.0) Updates `sphinx` from 8.0.2 to 8.1.0 - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES.rst) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v8.0.2...v8.1.0) --- updated-dependencies: - dependency-name: pypandoc dependency-type: direct:production update-type: version-update:semver-minor dependency-group: documentation - dependency-name: sphinx-gallery dependency-type: direct:production update-type: version-update:semver-minor dependency-group: documentation - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-minor dependency-group: documentation ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3478.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3478.dependencies.md | 1 + pyproject.toml | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 doc/changelog.d/3478.dependencies.md diff --git a/doc/changelog.d/3478.dependencies.md b/doc/changelog.d/3478.dependencies.md new file mode 100644 index 0000000000..ee0f7490e6 --- /dev/null +++ b/doc/changelog.d/3478.dependencies.md @@ -0,0 +1 @@ +build: bump the documentation group with 3 updates \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index d3c6fd65ca..2ca8ce34fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,17 +89,17 @@ doc = [ "pandas==2.2.3", "plotly==5.24.1", "pyiges[full]==0.3.1", - "pypandoc==1.13", + "pypandoc==1.14", "pytest-sphinx==0.6.3", "pythreejs==2.4.2", "sphinx-autobuild==2024.10.3", "sphinx-autodoc-typehints==1.25.2", "sphinx-copybutton==0.5.2", "sphinx-design==0.6.1", - "sphinx-gallery==0.17.1", + "sphinx-gallery==0.18.0", "sphinx-jinja==2.0.2", "sphinx-notfound-page==1.0.4", - "sphinx==8.0.2", + "sphinx==8.1.0", "sphinxcontrib-websupport==2.0.0", "sphinxemoji==0.3.1", "vtk==9.3.1", From 2bf19658b8c371c32c37053c2ca3ef4c71ca18b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 09:44:04 +0000 Subject: [PATCH 09/33] build: bump numpy from 2.1.1 to 2.1.2 in the minimal group (#3476) * build: bump numpy from 2.1.1 to 2.1.2 in the minimal group Bumps the minimal group with 1 update: [numpy](https://github.com/numpy/numpy). Updates `numpy` from 2.1.1 to 2.1.2 - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v2.1.1...v2.1.2) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch dependency-group: minimal ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3476.maintenance.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: Camille <78221213+clatapie@users.noreply.github.com> --- doc/changelog.d/3476.maintenance.md | 1 + minimum_requirements.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3476.maintenance.md diff --git a/doc/changelog.d/3476.maintenance.md b/doc/changelog.d/3476.maintenance.md new file mode 100644 index 0000000000..57844d8fc5 --- /dev/null +++ b/doc/changelog.d/3476.maintenance.md @@ -0,0 +1 @@ +build: bump numpy from 2.1.1 to 2.1.2 in the minimal group \ No newline at end of file diff --git a/minimum_requirements.txt b/minimum_requirements.txt index 1220e74fd6..0f95558e27 100644 --- a/minimum_requirements.txt +++ b/minimum_requirements.txt @@ -1,6 +1,6 @@ ansys-api-mapdl==0.5.2 importlib-metadata==8.5.0 -numpy==2.1.1 +numpy==2.1.2 platformdirs==4.3.6 psutil==6.0.0 pyansys-tools-versioning==0.6.0 From b6a9c65141376497cd7b6ad29df5679441f38f42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 12:02:11 +0200 Subject: [PATCH 10/33] build: bump ansys-tools-visualization-interface from 0.4.4 to 0.4.5 in the core group (#3477) * build: bump ansys-tools-visualization-interface in the core group Bumps the core group with 1 update: [ansys-tools-visualization-interface](https://github.com/ansys/ansys-tools-visualization-interface). Updates `ansys-tools-visualization-interface` from 0.4.4 to 0.4.5 - [Release notes](https://github.com/ansys/ansys-tools-visualization-interface/releases) - [Changelog](https://github.com/ansys/ansys-tools-visualization-interface/blob/main/CHANGELOG.md) - [Commits](https://github.com/ansys/ansys-tools-visualization-interface/compare/v0.4.4...v0.4.5) --- updated-dependencies: - dependency-name: ansys-tools-visualization-interface dependency-type: direct:production update-type: version-update:semver-patch dependency-group: core ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3477.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3477.dependencies.md | 1 + pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 doc/changelog.d/3477.dependencies.md diff --git a/doc/changelog.d/3477.dependencies.md b/doc/changelog.d/3477.dependencies.md new file mode 100644 index 0000000000..ab555ce9ce --- /dev/null +++ b/doc/changelog.d/3477.dependencies.md @@ -0,0 +1 @@ +build: bump ansys-tools-visualization-interface from 0.4.4 to 0.4.5 in the core group \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 2ca8ce34fd..4de0aa12fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ jupyter = [ tests = [ "ansys-dpf-core==0.10.1", - "ansys-tools-visualization-interface==0.4.4", + "ansys-tools-visualization-interface==0.4.5", "autopep8==2.3.1", "matplotlib==3.9.2", "pandas==2.2.3", @@ -75,7 +75,7 @@ tests = [ doc = [ "ansys-dpf-core==0.10.1", "ansys-mapdl-reader==0.54.1", - "ansys-tools-visualization-interface==0.4.4", + "ansys-tools-visualization-interface==0.4.5", "ansys-sphinx-theme==1.1.2", "grpcio==1.66.2", "imageio-ffmpeg==0.5.1", From 6012cd88de3da625c8ffaf2b0707b4d18972c3b2 Mon Sep 17 00:00:00 2001 From: PyAnsys CI Bot <92810346+pyansys-ci-bot@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:23:20 +0200 Subject: [PATCH 11/33] chore: update CHANGELOG for v0.68.6 (#3479) * chore: updating CHANGELOG for v0.68.6 * chore: adding changelog file 3479.added.md [dependabot-skip] --- doc/changelog.d/3453.dependencies.md | 1 - doc/changelog.d/3454.dependencies.md | 1 - doc/changelog.d/3455.added.md | 1 - doc/changelog.d/3457.fixed.md | 1 - doc/changelog.d/3460.miscellaneous.md | 1 - doc/changelog.d/3461.fixed.md | 1 - doc/changelog.d/3463.fixed.md | 1 - doc/changelog.d/3465.documentation.md | 1 - doc/changelog.d/3468.fixed.md | 1 - doc/changelog.d/3469.documentation.md | 1 - doc/changelog.d/3470.maintenance.md | 1 - doc/changelog.d/3471.maintenance.md | 1 - doc/changelog.d/3472.maintenance.md | 1 - doc/changelog.d/3473.added.md | 1 - doc/changelog.d/3476.maintenance.md | 1 - doc/changelog.d/3477.dependencies.md | 1 - doc/changelog.d/3478.dependencies.md | 1 - doc/changelog.d/3479.added.md | 1 + doc/source/changelog.rst | 49 +++++++++++++++++++++++++++ 19 files changed, 50 insertions(+), 17 deletions(-) delete mode 100644 doc/changelog.d/3453.dependencies.md delete mode 100644 doc/changelog.d/3454.dependencies.md delete mode 100644 doc/changelog.d/3455.added.md delete mode 100644 doc/changelog.d/3457.fixed.md delete mode 100644 doc/changelog.d/3460.miscellaneous.md delete mode 100644 doc/changelog.d/3461.fixed.md delete mode 100644 doc/changelog.d/3463.fixed.md delete mode 100644 doc/changelog.d/3465.documentation.md delete mode 100644 doc/changelog.d/3468.fixed.md delete mode 100644 doc/changelog.d/3469.documentation.md delete mode 100644 doc/changelog.d/3470.maintenance.md delete mode 100644 doc/changelog.d/3471.maintenance.md delete mode 100644 doc/changelog.d/3472.maintenance.md delete mode 100644 doc/changelog.d/3473.added.md delete mode 100644 doc/changelog.d/3476.maintenance.md delete mode 100644 doc/changelog.d/3477.dependencies.md delete mode 100644 doc/changelog.d/3478.dependencies.md create mode 100644 
doc/changelog.d/3479.added.md diff --git a/doc/changelog.d/3453.dependencies.md b/doc/changelog.d/3453.dependencies.md deleted file mode 100644 index 6b0cf99f96..0000000000 --- a/doc/changelog.d/3453.dependencies.md +++ /dev/null @@ -1 +0,0 @@ -build: bump grpcio from 1.66.1 to 1.66.2 in the grpc-deps group \ No newline at end of file diff --git a/doc/changelog.d/3454.dependencies.md b/doc/changelog.d/3454.dependencies.md deleted file mode 100644 index bb7cd2169b..0000000000 --- a/doc/changelog.d/3454.dependencies.md +++ /dev/null @@ -1 +0,0 @@ -build: bump sphinx-autobuild from 2024.9.19 to 2024.10.3 in the documentation group \ No newline at end of file diff --git a/doc/changelog.d/3455.added.md b/doc/changelog.d/3455.added.md deleted file mode 100644 index 66d41b6785..0000000000 --- a/doc/changelog.d/3455.added.md +++ /dev/null @@ -1 +0,0 @@ -chore: update CHANGELOG for v0.68.5 \ No newline at end of file diff --git a/doc/changelog.d/3457.fixed.md b/doc/changelog.d/3457.fixed.md deleted file mode 100644 index 7bb5c7aa1c..0000000000 --- a/doc/changelog.d/3457.fixed.md +++ /dev/null @@ -1 +0,0 @@ -fix: contributors file \ No newline at end of file diff --git a/doc/changelog.d/3460.miscellaneous.md b/doc/changelog.d/3460.miscellaneous.md deleted file mode 100644 index 47a89a2618..0000000000 --- a/doc/changelog.d/3460.miscellaneous.md +++ /dev/null @@ -1 +0,0 @@ -feat: having two global flags. One for visualizer and one for pyvista \ No newline at end of file diff --git a/doc/changelog.d/3461.fixed.md b/doc/changelog.d/3461.fixed.md deleted file mode 100644 index 76f0aad061..0000000000 --- a/doc/changelog.d/3461.fixed.md +++ /dev/null @@ -1 +0,0 @@ -fix: environment variables not being passed to MAPDL process \ No newline at end of file diff --git a/doc/changelog.d/3463.fixed.md b/doc/changelog.d/3463.fixed.md deleted file mode 100644 index f7d688caa4..0000000000 --- a/doc/changelog.d/3463.fixed.md +++ /dev/null @@ -1 +0,0 @@ -fix: exiting earlier to avoid exceptions from gRPC calls \ No newline at end of file diff --git a/doc/changelog.d/3465.documentation.md b/doc/changelog.d/3465.documentation.md deleted file mode 100644 index 16dadbee9b..0000000000 --- a/doc/changelog.d/3465.documentation.md +++ /dev/null @@ -1 +0,0 @@ -docs: another hpc docs reorg \ No newline at end of file diff --git a/doc/changelog.d/3468.fixed.md b/doc/changelog.d/3468.fixed.md deleted file mode 100644 index ab369c1e41..0000000000 --- a/doc/changelog.d/3468.fixed.md +++ /dev/null @@ -1 +0,0 @@ -fix: add ``build cheatsheet`` as env variable within doc-build \ No newline at end of file diff --git a/doc/changelog.d/3469.documentation.md b/doc/changelog.d/3469.documentation.md deleted file mode 100644 index 4df69c6b1c..0000000000 --- a/doc/changelog.d/3469.documentation.md +++ /dev/null @@ -1 +0,0 @@ -docs: fix cheat sheet rendering \ No newline at end of file diff --git a/doc/changelog.d/3470.maintenance.md b/doc/changelog.d/3470.maintenance.md deleted file mode 100644 index 031d764761..0000000000 --- a/doc/changelog.d/3470.maintenance.md +++ /dev/null @@ -1 +0,0 @@ -ci: bump the actions group with 2 updates \ No newline at end of file diff --git a/doc/changelog.d/3471.maintenance.md b/doc/changelog.d/3471.maintenance.md deleted file mode 100644 index 23264207b9..0000000000 --- a/doc/changelog.d/3471.maintenance.md +++ /dev/null @@ -1 +0,0 @@ -ci: pre-commit autoupdate \ No newline at end of file diff --git a/doc/changelog.d/3472.maintenance.md b/doc/changelog.d/3472.maintenance.md deleted file mode 100644 index 
611a7b7bbc..0000000000 --- a/doc/changelog.d/3472.maintenance.md +++ /dev/null @@ -1 +0,0 @@ -ci: bypass team check if it is dependabot \ No newline at end of file diff --git a/doc/changelog.d/3473.added.md b/doc/changelog.d/3473.added.md deleted file mode 100644 index 4cf3d59e82..0000000000 --- a/doc/changelog.d/3473.added.md +++ /dev/null @@ -1 +0,0 @@ -refactor: removing deprecated arguments \ No newline at end of file diff --git a/doc/changelog.d/3476.maintenance.md b/doc/changelog.d/3476.maintenance.md deleted file mode 100644 index 57844d8fc5..0000000000 --- a/doc/changelog.d/3476.maintenance.md +++ /dev/null @@ -1 +0,0 @@ -build: bump numpy from 2.1.1 to 2.1.2 in the minimal group \ No newline at end of file diff --git a/doc/changelog.d/3477.dependencies.md b/doc/changelog.d/3477.dependencies.md deleted file mode 100644 index ab555ce9ce..0000000000 --- a/doc/changelog.d/3477.dependencies.md +++ /dev/null @@ -1 +0,0 @@ -build: bump ansys-tools-visualization-interface from 0.4.4 to 0.4.5 in the core group \ No newline at end of file diff --git a/doc/changelog.d/3478.dependencies.md b/doc/changelog.d/3478.dependencies.md deleted file mode 100644 index ee0f7490e6..0000000000 --- a/doc/changelog.d/3478.dependencies.md +++ /dev/null @@ -1 +0,0 @@ -build: bump the documentation group with 3 updates \ No newline at end of file diff --git a/doc/changelog.d/3479.added.md b/doc/changelog.d/3479.added.md new file mode 100644 index 0000000000..1f7da7eaf7 --- /dev/null +++ b/doc/changelog.d/3479.added.md @@ -0,0 +1 @@ +chore: update CHANGELOG for v0.68.6 \ No newline at end of file diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index 5bb1ef792f..c814e6df83 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -9,6 +9,55 @@ This document contains the release notes for the project. .. towncrier release notes start +`0.68.6 `_ - 2024-10-11 +============================================================================== + +Added +^^^^^ + +- chore: update CHANGELOG for v0.68.5 `#3455 `_ +- refactor: removing deprecated arguments `#3473 `_ + + +Fixed +^^^^^ + +- fix: contributors file `#3457 `_ +- fix: environment variables not being passed to MAPDL process `#3461 `_ +- fix: exiting earlier to avoid exceptions from gRPC calls `#3463 `_ +- fix: add ``build cheatsheet`` as env variable within doc-build `#3468 `_ + + +Dependencies +^^^^^^^^^^^^ + +- build: bump grpcio from 1.66.1 to 1.66.2 in the grpc-deps group `#3453 `_ +- build: bump sphinx-autobuild from 2024.9.19 to 2024.10.3 in the documentation group `#3454 `_ +- build: bump ansys-tools-visualization-interface from 0.4.4 to 0.4.5 in the core group `#3477 `_ +- build: bump the documentation group with 3 updates `#3478 `_ + + +Miscellaneous +^^^^^^^^^^^^^ + +- feat: having two global flags. 
One for visualizer and one for pyvista `#3460 `_ + + +Documentation +^^^^^^^^^^^^^ + +- docs: another hpc docs reorg `#3465 `_ +- docs: fix cheat sheet rendering `#3469 `_ + + +Maintenance +^^^^^^^^^^^ + +- ci: bump the actions group with 2 updates `#3470 `_ +- ci: pre-commit autoupdate `#3471 `_ +- ci: bypass team check if it is dependabot `#3472 `_ +- build: bump numpy from 2.1.1 to 2.1.2 in the minimal group `#3476 `_ + `0.68.5 `_ - 2024-10-04 ============================================================================== From c0857064d59091c13bcaa1e01fb2691bc8ab9e18 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 11:10:36 +0200 Subject: [PATCH 12/33] ci: pre-commit autoupdate (#3482) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: pre-commit autoupdate updates: - [github.com/python-jsonschema/check-jsonschema: 0.29.3 → 0.29.4](https://github.com/python-jsonschema/check-jsonschema/compare/0.29.3...0.29.4) * chore: adding changelog file 3482.maintenance.md [dependabot-skip] --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- doc/changelog.d/3482.maintenance.md | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3482.maintenance.md diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 06938f50fa..db0179e0f3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -78,6 +78,6 @@ repos: # this validates our github workflow files - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.29.3 + rev: 0.29.4 hooks: - id: check-github-workflows diff --git a/doc/changelog.d/3482.maintenance.md b/doc/changelog.d/3482.maintenance.md new file mode 100644 index 0000000000..23264207b9 --- /dev/null +++ b/doc/changelog.d/3482.maintenance.md @@ -0,0 +1 @@ +ci: pre-commit autoupdate \ No newline at end of file From a449852e8568d07ef9f25abe204eb50ec7536c27 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 11:11:41 +0200 Subject: [PATCH 13/33] ci: bump thollander/actions-comment-pull-request from 2 to 3 in the actions group (#3481) * ci: bump thollander/actions-comment-pull-request in the actions group Bumps the actions group with 1 update: [thollander/actions-comment-pull-request](https://github.com/thollander/actions-comment-pull-request). Updates `thollander/actions-comment-pull-request` from 2 to 3 - [Release notes](https://github.com/thollander/actions-comment-pull-request/releases) - [Commits](https://github.com/thollander/actions-comment-pull-request/compare/v2...v3) --- updated-dependencies: - dependency-name: thollander/actions-comment-pull-request dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3481.maintenance.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- .github/workflows/ci.yml | 2 +- doc/changelog.d/3481.maintenance.md | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3481.maintenance.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e85457424f..57b9d6889f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -715,7 +715,7 @@ jobs: - name: "PR comment with reactions" if: ${{ steps.attatch-to-pr.outputs.pushed == 'true' }} - uses: thollander/actions-comment-pull-request@v2 + uses: thollander/actions-comment-pull-request@v3 with: message: | Hello! :wave: diff --git a/doc/changelog.d/3481.maintenance.md b/doc/changelog.d/3481.maintenance.md new file mode 100644 index 0000000000..3f73b232e1 --- /dev/null +++ b/doc/changelog.d/3481.maintenance.md @@ -0,0 +1 @@ +ci: bump thollander/actions-comment-pull-request from 2 to 3 in the actions group \ No newline at end of file From 64b0ddaf3a39bfad5ff60838a4e4419e30f2fb52 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Tue, 15 Oct 2024 18:06:16 +0200 Subject: [PATCH 14/33] ci: force coloring in pytest (#3484) * ci: force coloring in pytest * chore: adding changelog file 3484.maintenance.md [dependabot-skip] --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- .github/workflows/ci.yml | 2 +- doc/changelog.d/3484.maintenance.md | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3484.maintenance.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 57b9d6889f..5abb95986c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,7 +33,7 @@ env: DPF_PORT: 21004 MAPDL_PACKAGE: ghcr.io/ansys/mapdl ON_CI: True - PYTEST_ARGUMENTS: '-vvv -ra --durations=10 --maxfail=3 --reruns 3 --reruns-delay 4 --cov=ansys.mapdl.core --cov-report=html' + PYTEST_ARGUMENTS: '-vvv --color=yes -ra --durations=10 --maxfail=3 --reruns 3 --reruns-delay 4 --cov=ansys.mapdl.core --cov-report=html' BUILD_CHEATSHEET: True # Following env vars when changed will "reset" the mentioned cache, diff --git a/doc/changelog.d/3484.maintenance.md b/doc/changelog.d/3484.maintenance.md new file mode 100644 index 0000000000..1caacc979a --- /dev/null +++ b/doc/changelog.d/3484.maintenance.md @@ -0,0 +1 @@ +ci: force coloring in pytest \ No newline at end of file From 71db67d3ef73091466855d84d6fef93aef0751b6 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Wed, 16 Oct 2024 16:08:17 +0200 Subject: [PATCH 15/33] refactor: launch_mapdl (#3475) * feat: proof of concept * chore: adding changelog file 3475.added.md [dependabot-skip] * test: commenting complex test * test: adding arg to pytest default. * chore: changing skip message. * refactor: launch_mapdl * feat: proof of concept * test: commenting complex test * test: adding arg to pytest default. * chore: changing skip message. * refactor: launch_mapdl * chore: adding changelog file 3475.added.md [dependabot-skip] * fix: pypim test * refactor: move start args to another function. Group also more SLURM code. * refactor: more refactor and typing. 
* test: fix * fix: typing * refactor: reorg to generate start_parm just before launching MAPDL. Getting rid of update_start_param. Using get_cpu. Renaming functions. * refactor: reorder some objects differently in mapdl_core * refactor: adapting test functions. * test: fix test * refactor: change order of start_instance branching * refactor: breaking 'launch_grpc'. Get port in 'get_port'. Reducing importance of 'launch_grpc'. Externalizing cmd generation. * test: fix launch_remote * chore: adding ignore image cache argument to pytest default options * fix: wrong signature * fix: passing too many arguments * fix: unrecognised argument * fix: arguments * ci: auto fixes from pre-commit.com hooks. for more information, see https://pre-commit.ci * test: fix * ci: trigger CICD * fix: not setting mode. * feat: changing order in branching * fix: using version and exec_file * refactor: avoiding additional_switches on not launching. Small adjustments. * fix: launching on busy port. * fix: tests * fix: open gui * feat: caching mapdl directory when exiting * fix: bypassing error when mapdl._exited is True from previous failed tests * chore: removing commented code. * fix: test * fix: not raising if env var does not exists. * fix: test * refactor: run get_location only when starting instance * fix: Passing arguments to start_parm * fix: giving priority to port argument instead of the env var * feat: adding early exit to pool. * fix: tests * ci: retrigger * fix: removing repeated arguments * fix: added missing import * fix: missing package * feat: adding more info to the exception * test: skipping pool tests in remote. * fix: removing unneeded argument. * feat: improving test * feat: removing annotation and passing `start_instance` to `get_port`. * refactor: cleaner env var checks * fix: changing port when env var is used. * refactor: cosmetic change in env vars * feat: giving priority to arguments, over env vars. * refactor: Homogenising precedence between args and env var. * test: add solu processor call * test: testing port as strings in pool.py * tests: adding test to is_on_slurm and test_generate_mapdl_launch_command_**OS** * build: adding pyfake dependency. Using it for testing * test: running /solu silently * test: fix not finding /proc/meminfo * test: using patch * test: disable part of the test if not on local * fix: removing fixture * chore: adding more comments. * test: removing composite example * fix: remove unused code * refactor: increasing coverage * test: fix * fix: test * fix: some codacity issues. 
* docs: fix some docstrings * test: adding test for launch_grpc * test: improving coverage * fix: infinite loop * refactor: a bit better --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: Gayuso Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- doc/changelog.d/3475.added.md | 1 + .../extended_examples/executable/cli_rotor.py | 2 +- .../extended_examples/executable/rotor.py | 2 +- examples/00-mapdl-examples/composite_dcb.py | 488 ----- pyproject.toml | 3 +- src/ansys/mapdl/core/examples/downloads.py | 4 + src/ansys/mapdl/core/launcher.py | 1585 +++++++++-------- src/ansys/mapdl/core/mapdl_core.py | 44 +- src/ansys/mapdl/core/mapdl_grpc.py | 27 +- src/ansys/mapdl/core/misc.py | 12 +- src/ansys/mapdl/core/pool.py | 17 +- tests/common.py | 22 +- tests/conftest.py | 16 +- tests/test_launcher.py | 534 +++++- tests/test_mapdl.py | 11 +- tests/test_pool.py | 2 +- 16 files changed, 1395 insertions(+), 1375 deletions(-) create mode 100644 doc/changelog.d/3475.added.md delete mode 100644 examples/00-mapdl-examples/composite_dcb.py diff --git a/doc/changelog.d/3475.added.md b/doc/changelog.d/3475.added.md new file mode 100644 index 0000000000..3750f99954 --- /dev/null +++ b/doc/changelog.d/3475.added.md @@ -0,0 +1 @@ +refactor: launch_mapdl \ No newline at end of file diff --git a/doc/source/examples/extended_examples/executable/cli_rotor.py b/doc/source/examples/extended_examples/executable/cli_rotor.py index 646316a331..8004fc5c89 100644 --- a/doc/source/examples/extended_examples/executable/cli_rotor.py +++ b/doc/source/examples/extended_examples/executable/cli_rotor.py @@ -46,7 +46,7 @@ def main(n_blades, blade_length, elastic_modulus, density): f"Elastic modulus: {elastic_modulus/1E9} GPa\nDensity: {density} Kg/m3" ) # Launch MAPDL - mapdl = launch_mapdl(port=50052) + mapdl = launch_mapdl() mapdl.clear() mapdl.prep7() diff --git a/doc/source/examples/extended_examples/executable/rotor.py b/doc/source/examples/extended_examples/executable/rotor.py index 83aa5dc409..4811145698 100644 --- a/doc/source/examples/extended_examples/executable/rotor.py +++ b/doc/source/examples/extended_examples/executable/rotor.py @@ -29,7 +29,7 @@ from ansys.mapdl.core import launch_mapdl # Launch MAPDL -mapdl = launch_mapdl(port=50052) +mapdl = launch_mapdl() mapdl.clear() mapdl.prep7() diff --git a/examples/00-mapdl-examples/composite_dcb.py b/examples/00-mapdl-examples/composite_dcb.py deleted file mode 100644 index 0f27da5836..0000000000 --- a/examples/00-mapdl-examples/composite_dcb.py +++ /dev/null @@ -1,488 +0,0 @@ -# Copyright (C) 2016 - 2024 ANSYS, Inc. and/or its affiliates. -# SPDX-License-Identifier: MIT -# -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -""" -.. _ref_dcb_test_composite_delamination: - -====================================================================== -Static simulation of double cantilever beam test via cohesive elements -====================================================================== - -This example is a classic double cantilever beam test commonly used -to study mode I interfacial delamination of composite plates. - -Description -=========== - -Objective -~~~~~~~~~ - -This example shows how to use PyMAPDL to simulate delamination in -composite materials. PyDPF modules are also used for the postprocessing of the results. - -Problem figure -~~~~~~~~~~~~~~ - -.. figure:: ../../../images/dcb_test_dark_theme.png - :figclass: only-dark - :width: 400pt - :alt: DCB experimental test setup - - **Source**: https://www.wikiwand.com/en/Delamination - - -.. figure:: ../../../images/dcb_test_light_theme.png - :figclass: only-light - :width: 400pt - :alt: DCB experimental test setup - - **Source**: https://www.wikiwand.com/en/Delamination - -Procedure -~~~~~~~~~ -* Launch the MAPDL instance. -* Set up the model. -* Solve the model. -* Plot results using PyMAPDL. -* Plot results using PyDPF. -* Plot reaction force. - -Additional packages -~~~~~~~~~~~~~~~~~~~ -These additional packages are imported for use: -* - `Matplotlib `_ for plotting -* - `Pandas `_ for data analysis and manipulation -""" - -############################################################################### -# Start MAPDL as a service -# ~~~~~~~~~~~~~~~~~~~~~~~~ -# This example begins by importing the required packages and then launching Ansys Mechanical APDL. -import os -import tempfile - -from ansys.dpf import core as dpf -import matplotlib.pyplot as plt -import numpy as np -import pyvista as pv - -from ansys.mapdl import core as pymapdl - -# Start MAPDL as a service -mapdl = pymapdl.launch_mapdl() -print(mapdl) - -############################################################################### -# Set geometrical inputs -# ====================== -# Set geometrical inputs for the model. -length = 75.0 -pre_crack = 10.0 -width = 25.0 -height = 1.7 -d = 10.0 -# a small quantity defined for avoiding rounding-off error when picking geometrical entities -eps = 1e-1 - -############################################################################### -# Set up the model -# ================ -# Set up the model by choosing the units system and the -# element types for the simulations. Because a fully 3D approach -# is chosen for this example, ``SOLID186`` elements are used for meshing volumes, and -# ``TARGE170`` and ``CONTA174`` are used for modelling cohesive elements in between contact -# surfaces. -# -# -# Define material parameters -# ~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Composite plates are modelled using homogeneous linear elastic orthotropic -# properties, whereas a bilinear cohesive law is used for cohesive elements. 
- -# Enter the preprocessor and define the unit system -mapdl.prep7() -mapdl.units("mpa") - -# Define SOLID185, TARGE170, and CONTA174 elements, along with the element size -mapdl.et(1, 185) -mapdl.et(2, 170) -mapdl.et(3, 174) -mapdl.esize(10.0) - -# Define material properties for the composite plates -mapdl.mp("ex", 1, 61340) -mapdl.mp("dens", 1, 1.42e-09) -mapdl.mp("nuxy", 1, 0.1) - -# Define the bilinear cohesive law -mapdl.mp("mu", 2, 0) -mapdl.tb("czm", 2, 1, "", "bili") -mapdl.tbtemp(25.0) -mapdl.tbdata(1, 50.0, 0.5, 50, 0.5, 0.01, 2) - -############################################################################### -# Create the geometry in the model and meshing -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# The two plates are generated as two parallelepipeds. Composite material properties -# and the three-dimensional elements are then assigned. - -# Generate the two composite plates -vnum0 = mapdl.block(0.0, length + pre_crack, 0.0, width, 0.0, height) -vnum1 = mapdl.block(0.0, length + pre_crack, 0.0, width, height, 2 * height) - -# Assign material properties and element type -mapdl.mat(1) -mapdl.type(1) - -# performing the meshing -mapdl.vmesh(vnum0) -mapdl.vmesh(vnum1) -mapdl.eplot() - -############################################################################### -# Generate cohesive elements in between the contact surfaces -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# The generation of cohesive elements is the most delicate part of the -# modelling approach. First, the two contact surfaces are identified -# and defined as a components (in this case `cm_1` and `cm_2` respectively). -# Then, the real constants for the ``CONTA174`` and ``TARGE170`` elements and -# their key options are set to capture the correct behavior. Descriptions for each -# of these parameters can be found in the Ansys element documentation. -# Finally, elements are generated on top of the respective surfaces `cm_1` and -# `cm_2`. 
- -# Identify the two touching areas and assign them to components -mapdl.allsel() -mapdl.asel("s", "loc", "z", 1.7) -areas = mapdl.geometry.anum -mapdl.asel("r", vmin=areas[0]) -mapdl.nsla("r", 1) -mapdl.nsel("r", "loc", "x", pre_crack, length + pre_crack + eps) -mapdl.components["cm_1"] = "node" - -mapdl.allsel() -mapdl.asel("s", "loc", "z", 1.7) -areas = mapdl.geometry.anum -mapdl.asel("r", vmin=areas[1]) -mapdl.nsla("r", 1) -mapdl.nsel("r", "loc", "x", pre_crack, length + pre_crack + eps) -mapdl.components["cm_2"] = "node" - -# Identify all the elements before generation of TARGE170 elements -mapdl.allsel() -mapdl.components["_elemcm"] = "elem" -mapdl.mat(2) - -# Assign real constants and key options -mapdl.r(3, "", "", 1.0, 0.1, 0, "") -mapdl.rmore("", "", 1.0e20, 0.0, 1.0, "") -mapdl.rmore(0.0, 0.0, 1.0, "", 1.0, 0.5) -mapdl.rmore(0.0, 1.0, 1.0, 0.0, "", 1.0) -mapdl.rmore("", "", "", "", "", 1.0) -mapdl.keyopt(3, 4, 0) -mapdl.keyopt(3, 5, 0) -mapdl.keyopt(3, 7, 0) -mapdl.keyopt(3, 8, 0) -mapdl.keyopt(3, 9, 0) -mapdl.keyopt(3, 10, 0) -mapdl.keyopt(3, 11, 0) -mapdl.keyopt(3, 12, 3) -mapdl.keyopt(3, 14, 0) -mapdl.keyopt(3, 18, 0) -mapdl.keyopt(3, 2, 0) -mapdl.keyopt(2, 5, 0) - -# Generate TARGE170 elements on top of cm_1 -mapdl.nsel("s", "", "", "cm_1") -mapdl.components["_target"] = "node" -mapdl.type(2) -mapdl.esln("s", 0) -mapdl.esurf() - -# Generate CONTA174 elements on top of cm_2 -mapdl.cmsel("s", "_elemcm") -mapdl.nsel("s", "", "", "cm_2") -mapdl.components["_contact"] = "node" -mapdl.type(3) -mapdl.esln("s", 0) -mapdl.esurf() - -############################################################################### -# Generate boundary conditions -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Assign boundary conditions to replicate the real test conditions. One end of the two -# composite plates is fixed against translation along the x, y, and z axis. On the -# opposite side of the plate, displacement conditions are applied to -# simulate the interfacial crack opening. These conditions are applied to the -# top and bottom nodes corresponding to the geometrical edges located -# respectively at these (x, y, z) coordinates:, ``(0.0, `y`, 0.0)`` and ``(0.0, `y`, 3.4)``. -# Two different components are assigned to these sets of nodes for a faster -# identification of the nodes bearing reaction forces. - -# Apply the two displacement conditions -mapdl.allsel() -mapdl.nsel(type_="s", item="loc", comp="x", vmin=0.0, vmax=0.0) -mapdl.nsel(type_="r", item="loc", comp="z", vmin=2 * height, vmax=2 * height) -mapdl.d(node="all", lab="uz", value=d) -mapdl.components["top_nod"] = "node" - -mapdl.allsel() -mapdl.nsel(type_="s", item="loc", comp="x", vmin=0.0, vmax=0.0) -mapdl.nsel(type_="r", item="loc", comp="z", vmin=0.0, vmax=0.0) -mapdl.d(node="all", lab="uz", value=-10) -mapdl.components["bot_nod"] = "node" - -# Apply the fix condition -mapdl.allsel() -mapdl.nsel( - type_="s", - item="loc", - comp="x", - vmin=length + pre_crack, - vmax=length + pre_crack, -) -mapdl.d(node="all", lab="ux", value=0.0) -mapdl.d(node="all", lab="uy", value=0.0) -mapdl.d(node="all", lab="uz", value=0.0) - -mapdl.eplot( - plot_bc=True, - bc_glyph_size=3, - title="", -) - -############################################################################### -# Solve the non-linear static analysis -# ==================================== -# -# Run a non-linear static analysis. To have smooth crack opening -# progression and facilitate convergency for the static solver, request -# 100 substeps. 
- -# Enter the solution processor and define the analysis settings -mapdl.allsel() -mapdl.finish() -mapdl.run("/SOLU") -mapdl.antype("static") - -# Activate non-linear geometry -mapdl.nlgeom("on") - -# Request substeps -mapdl.autots(key="on") -mapdl.nsubst(nsbstp=100, nsbmx=100, nsbmn=100) -mapdl.kbc(key=0) -mapdl.outres("all", "all") - -# Solve -output = mapdl.solve() - -############################################################################### -# Postprocessing -# ============== -# Use PyMAPDL and PyDPF for postprocessing. -# -# -# Postprocess results using PyMAPDL -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# This section shows how to use PyMAPDL to postprocess results. Because -# measuring the delamination length is important, plot the cohesive damage parameter. -# Although the damage parameter is an element parameter, the result is -# provided in terms of a nodal result. Thus, the result for just one of -# the four-noded cohesive element ``NMISC = 70`` is presented. -# The result for the other nodes are present at ``NMISC = 71,72,73``. -# You can retrieve the actual damage parameter nodal values from the -# solved model in form of a table (or an array). - -# Enter the postprocessor -mapdl.post1() - -# Select the substep -mapdl.set(1, 100) - -# Select ``CONTA174`` elements -mapdl.allsel() -mapdl.esel("s", "ename", "", 174) - -# Plot the element values -mapdl.post_processing.plot_element_values( - "nmisc", 70, scalar_bar_args={"title": "Cohesive Damage"} -) - -# Extract the nodal values of the damage parameter -mapdl.allsel() -mapdl.esel("s", "ename", "", 174) -mapdl.etable("damage", "nmisc", 70) - -damage_df = mapdl.pretab("damage").to_dataframe() - -############################################################################### -# Postprocessing results using PyDPF -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Use PyDPF to visualize the crack opening throughout the simulation as -# an animation. 
- -temp_directory = tempfile.gettempdir() -rst_path = mapdl.download_result(temp_directory) - -dpf.core.make_tmp_dir_server(dpf.SERVER) - -if dpf.SERVER.local_server: - path_source = rst_path -else: - path_source = dpf.upload_file_in_tmp_folder(rst_path) - -# Building the model -model = dpf.Model(path_source) - -# Get the mesh of the whole model -meshed_region = model.metadata.meshed_region - -# Get the mesh of the cohesive elements -mesh_scoping_cohesive = dpf.mesh_scoping_factory.named_selection_scoping( - "CM_1", model=model -) - -result_mesh = dpf.operators.mesh.from_scoping( - scoping=mesh_scoping_cohesive, inclusive=0, mesh=meshed_region -).eval() - -# Get the coordinates field for each mesh -mesh_field = meshed_region.field_of_properties(dpf.common.nodal_properties.coordinates) -mesh_field_cohesive = result_mesh.field_of_properties( - dpf.common.nodal_properties.coordinates -) - -# Get the index of the NMISC results -nmisc_index = 70 - -# Generate the damage result operator -data_src = dpf.DataSources(path_source) -dam_op = dpf.operators.result.nmisc(data_sources=data_src, item_index=70) - -# Generate the displacement operator -disp_op = model.results.displacement() - -# Create sum operators to compute the updated coordinates at step n -add_op = dpf.operators.math.add(fieldA=mesh_field) -add_op_cohesive = dpf.operators.math.add(fieldA=mesh_field_cohesive) - -# Instantiate a PyVista plotter and start the creation of a GIF -plotter = pv.Plotter(notebook=False, off_screen=True) -plotter.open_gif("dcb.gif") - -# Add the beam mesh to the scene -mesh_beam = meshed_region.grid -plotter.add_mesh( - mesh_beam, - lighting=False, - show_edges=True, - scalar_bar_args={"title": "Cohesive Damage"}, - clim=[0, 1], - opacity=0.3, -) - - -# Add the contact mesh to the scene -mesh_contact = result_mesh.grid -plotter.add_mesh( - mesh_contact, - opacity=0.9, - scalar_bar_args={"title": "Cohesive Damage"}, - clim=[0, 1], - scalars=np.zeros((mesh_contact.n_cells)), -) -for i in range(1, 100): - # Get displacements - disp = model.results.displacement(time_scoping=i).eval() - # Getting the updated coordinates - add_op.inputs.fieldB.connect(disp[0]) - disp_result = add_op.outputs.field() - # Get displacements for the cohesive layer - disp = model.results.displacement( - time_scoping=i, mesh_scoping=mesh_scoping_cohesive - ).eval() - # Get the updated coordinates for the cohesive layer - add_op_cohesive.inputs.fieldB.connect(disp[0]) - disp_cohesive = add_op_cohesive.outputs.field() - # Get the damage field - dam_op.inputs.time_scoping([i]) - cohesive_damage = dam_op.outputs.fields_container()[0] - # Update coordinates and scalars - plotter.update_coordinates(disp_result.data, mesh=mesh_beam, render=False) - plotter.update_coordinates(disp_cohesive.data, mesh=mesh_contact, render=False) - plotter.update_scalars(cohesive_damage.data, mesh=mesh_contact, render=False) - plotter.write_frame() - -plotter.close() - - -############################################################################### -# Plot the reaction force at the bottom nodes -mesh_scoping = model.metadata.named_selection("BOT_NOD") -f_tot = [] -d_tot = [] -for i in range(0, 100): - force_eval = model.results.element_nodal_forces( - time_scoping=i, mesh_scoping=mesh_scoping - ).eval() - force = force_eval[0].data - f_tot += [np.sum(force[:, 2])] - d = abs( - model.results.displacement(time_scoping=i, mesh_scoping=mesh_scoping) - .eval()[0] - .data[0] - ) - d_tot += [d[2]] - -d_tot[0] = 0 -f_tot[0] = 0 - -fig, ax = plt.subplots() - -plt.plot(d_tot, 
f_tot, "b") -plt.ylabel("Force [N]") -plt.xlabel("Displacement [mm]") -plt.show() - -############################################################################### -# Animate results using PyDPF with .animate() method -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Use PyDPF method :func:`FieldsContainer.animate() ` to visualize the crack opening throughout the simulation as -# an animation. -disp = model.results.displacement.on_all_time_freqs.eval() -camera_pos = disp.animate( - scale_factor=1.0, - save_as="dcb_animate.gif", - return_cpos=True, - show_axes=True, -) - -############################################################################### -# -# Exit MAPDL -mapdl.exit() - -try: - os.remove(path_source) -except (FileNotFoundError, PermissionError): - pass diff --git a/pyproject.toml b/pyproject.toml index 4de0aa12fe..034522ee2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,6 +63,7 @@ tests = [ "matplotlib==3.9.2", "pandas==2.2.3", "pyansys-tools-report==0.8.0", + "pyfakefs==5.7.1", "pyiges[full]==0.3.1", "pytest-cov==5.0.0", "pytest-memprof<0.3.0", @@ -119,7 +120,7 @@ pymapdl_convert_script = "ansys.mapdl.core.cli:old_pymapdl_convert_script_entry_ pymapdl = "ansys.mapdl.core.cli:main" [tool.pytest.ini_options] -addopts = "-ra -vvv" +addopts = "-ra -vvv --maxfail=10" junit_family = "legacy" filterwarnings = [ "ignore::FutureWarning", diff --git a/src/ansys/mapdl/core/examples/downloads.py b/src/ansys/mapdl/core/examples/downloads.py index cb8ec7966f..e8ba2f59f6 100644 --- a/src/ansys/mapdl/core/examples/downloads.py +++ b/src/ansys/mapdl/core/examples/downloads.py @@ -168,10 +168,12 @@ def download_manifold_example_data() -> dict: download paths into a dictionary domain id->path. Examples files are downloaded to a persistent cache to avoid re-downloading the same file twice. + Returns ------- dict[str:str] Path to the example files. + Examples -------- Download the manifold geometry, ans file and return the path of the file @@ -198,10 +200,12 @@ def download_cfx_mapping_example_data() -> dict: download paths into a dictionary domain id->path. Examples files are downloaded to a persistent cache to avoid re-downloading the same file twice. + Returns ------- dict[str:str] Path to the example files. 
+ Examples -------- >>> from ansys.mapdl.core.examples.downloads import download_cfx_mapping_example_data diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 48015b37c5..2c64723498 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -29,10 +29,9 @@ import re import socket import subprocess -import tempfile import threading import time -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union import warnings import psutil @@ -41,6 +40,7 @@ import ansys.platform.instancemanagement as pypim _HAS_PIM = True + except ModuleNotFoundError: # pragma: no cover _HAS_PIM = False @@ -55,10 +55,8 @@ from ansys.mapdl.core import LOG from ansys.mapdl.core._version import SUPPORTED_ANSYS_VERSIONS from ansys.mapdl.core.errors import ( - DeprecationError, LockFileException, MapdlDidNotStart, - MapdlRuntimeError, NotEnoughResources, PortAlreadyInUse, PortAlreadyInUseByAnMAPDLInstance, @@ -71,7 +69,6 @@ check_valid_ip, check_valid_port, create_temp_dir, - random_string, threaded, ) @@ -92,14 +89,49 @@ CONFIG_FILE = os.path.join(SETTINGS_DIR, "config.txt") ALLOWABLE_MODES = ["console", "grpc"] +ALLOWABLE_VERSION_INT = tuple(SUPPORTED_ANSYS_VERSIONS.keys()) + +ALLOWABLE_LAUNCH_MAPDL_ARGS = [ + "exec_file", + "run_location", + "jobname", + "nproc", + "ram", + "mode", + "override", + "loglevel", + "additional_switches", + "start_timeout", + "port", + "cleanup_on_exit", + "start_instance", + "ip", + "clear_on_connect", + "log_apdl", + "remove_temp_dir_on_exit", + "license_server_check", + "license_type", + "print_com", + "add_env_vars", + "replace_env_vars", + "version", + "detect_slurm_config", + "set_no_abort", + "force_intel" + # Non documented args + "use_vtk", + "just_launch", + "on_pool", + "_debug_no_launch", +] ON_WSL = os.name == "posix" and ( - bool(os.environ.get("WSL_DISTRO_NAME", None)) - or bool(os.environ.get("WSL_INTEROP", None)) + os.environ.get("WSL_DISTRO_NAME") or os.environ.get("WSL_INTEROP") ) if ON_WSL: LOG.info("On WSL: Running on WSL detected.") + LOG.debug("On WSL: Allowing 'start_instance' and 'ip' arguments together.") LOCALHOST = "127.0.0.1" MAPDL_DEFAULT_PORT = 50052 @@ -279,22 +311,15 @@ def port_in_use_using_psutil(port: Union[int, str]) -> bool: return False -def launch_grpc( +def generate_mapdl_launch_command( exec_file: str = "", jobname: str = "file", nproc: int = 2, ram: Optional[int] = None, - run_location: str = None, port: int = MAPDL_DEFAULT_PORT, additional_switches: str = "", - override: bool = True, - timeout: int = 20, - verbose: Optional[bool] = None, - add_env_vars: Optional[Dict[str, str]] = None, - replace_env_vars: Optional[Dict[str, str]] = None, - **kwargs, # to keep compatibility with console interface. -) -> Tuple[int, str, subprocess.Popen]: - """Start MAPDL locally in gRPC mode. +) -> str: + """Generate the command line to start MAPDL in gRPC mode. Parameters ---------- @@ -313,10 +338,6 @@ def launch_grpc( The default is ``None``, in which case 2 GB (2048 MB) is used. To force a fixed size throughout the run, specify a negative number. - run_location : str, optional - MAPDL working directory. The default is the temporary working - directory. - port : int Port to launch MAPDL gRPC on. Final port will be the first port available after (or including) this port. @@ -331,204 +352,18 @@ def launch_grpc( these are already included to start up the MAPDL server. 
See the notes section for additional details. - override : bool, optional - Attempts to delete the lock file at the run_location. - Useful when a prior MAPDL session has exited prematurely and - the lock file has not been deleted. - - verbose : bool, optional - Print all output when launching and running MAPDL. Not - recommended unless debugging the MAPDL start. Default - ``False``. - - .. deprecated:: v0.65.0 - The ``verbose`` argument is deprecated and will be completely - removed in a future release. - Use a logger instead. See :ref:`api_logging` for more details. - - kwargs : dict - Not used. Added to keep compatibility between Mapdl_grpc and - launcher_grpc ``start_parm``s. Returns ------- - int - Returns the port number that the gRPC instance started on. - - Notes - ----- - If ``PYMAPDL_START_INSTANCE`` is set to FALSE, this ``launch_mapdl`` will - look for an existing instance of MAPDL at ``PYMAPDL_IP`` on port - ``PYMAPDL_PORT``, with defaults 127.0.0.1 and 50052 if unset. This is - typically used for automated documentation and testing. - - These are the MAPDL switch options as of 2020R2 applicable for - running MAPDL as a service via gRPC. Excluded switches such as - ``"-j"`` either not applicable or are set via keyword arguments. - - -acc - Enables the use of GPU hardware. See GPU - Accelerator Capability in the Parallel Processing Guide for more - information. - - -amfg - Enables the additive manufacturing capability. Requires - an additive manufacturing license. For general information about - this feature, see AM Process Simulation in ANSYS Workbench. - - -ansexe - Activates a custom mechanical APDL executable. - In the ANSYS Workbench environment, activates a custom - Mechanical APDL executable. - - -custom - Calls a custom Mechanical APDL executable - See Running Your Custom Executable in the Programmer's Reference - for more information. - - -db value - Initial memory allocation - Defines the portion of workspace (memory) to be used as the - initial allocation for the database. The default is 1024 - MB. Specify a negative number to force a fixed size throughout - the run; useful on small memory systems. - - -dis - Enables Distributed ANSYS - See the Parallel Processing Guide for more information. - - -dvt - Enables ANSYS DesignXplorer advanced task (add-on). - Requires DesignXplorer. - - -l - Specifies a language file to use other than English - This option is valid only if you have a translated message file - in an appropriately named subdirectory in - ``/ansys_inc/v201/ansys/docu`` or - ``Program Files\\ANSYS\\Inc\\V201\\ANSYS\\docu`` - - -m - Specifies the total size of the workspace - Workspace (memory) in megabytes used for the initial - allocation. If you omit the ``-m`` option, the default is 2 GB - (2048 MB). Specify a negative number to force a fixed size - throughout the run. - - -machines - Specifies the distributed machines - Machines on which to run a Distributed ANSYS analysis. See - Starting Distributed ANSYS in the Parallel Processing Guide for - more information. - - -mpi - Specifies the type of MPI to use. - See the Parallel Processing Guide for more information. - - -mpifile - Specifies an existing MPI file - Specifies an existing MPI file (appfile) to be used in a - Distributed ANSYS run. See Using MPI Files in the Parallel - Processing Guide for more information. - - -na - Specifies the number of GPU accelerator devices - Number of GPU devices per machine or compute node when running - with the GPU accelerator feature. 
See GPU Accelerator Capability - in the Parallel Processing Guide for more information. - - -name - Defines Mechanical APDL parameters - Set mechanical APDL parameters at program start-up. The parameter - name must be at least two characters long. For details about - parameters, see the ANSYS Parametric Design Language Guide. - - -p - ANSYS session product - Defines the ANSYS session product that will run during the - session. For more detailed information about the ``-p`` option, - see Selecting an ANSYS Product via the Command Line. - - -ppf - HPC license - Specifies which HPC license to use during a parallel processing - run. See HPC Licensing in the Parallel Processing Guide for more - information. - - -smp - Enables shared-memory parallelism. - See the Parallel Processing Guide for more information. - - Examples - -------- - Launch MAPDL using the default configuration. - - >>> from ansys.mapdl.core import launch_mapdl - >>> mapdl = launch_mapdl() - - Run MAPDL with shared memory parallel and specify the location of - the ansys binary. - - >>> exec_file = 'C:/Program Files/ANSYS Inc/v202/ansys/bin/winx64/ANSYS202.exe' - >>> mapdl = launch_mapdl(exec_file, additional_switches='-smp') + str + Command """ - LOG.debug("Starting 'launch_mapdl'.") - # disable all MAPDL pop-up errors: - os.environ["ANS_CMD_NODIAG"] = "TRUE" - - if verbose: - raise DeprecationError( - "The ``verbose`` argument is deprecated and will be completely removed in a future release. Use a logger instead. " - "See https://mapdl.docs.pyansys.com/version/stable/api/logging.html for more details." - ) - - # use temporary directory if run_location is unspecified - if run_location is None: - run_location = create_temp_dir() - LOG.debug(f"Using temporary directory for MAPDL run location: {run_location}") - elif not os.path.isdir(run_location): - os.mkdir(run_location) - LOG.debug(f"Creating directory for MAPDL run location: {run_location}") - - if not os.access(run_location, os.W_OK): - raise IOError('Unable to write to ``run_location`` "%s"' % run_location) - # verify version if _HAS_ATP: if version_from_path("mapdl", exec_file) < 202: raise VersionError("The MAPDL gRPC interface requires MAPDL 20.2 or later") - # verify lock file does not exist - check_lock_file(run_location, jobname, override) - - # get the next available port - if port is None: - if not pymapdl._LOCAL_PORTS: - port = MAPDL_DEFAULT_PORT - LOG.debug(f"Using default port: {port}") - else: - port = max(pymapdl._LOCAL_PORTS) + 1 - LOG.debug(f"Using next available port: {port}") - - while port_in_use(port) or port in pymapdl._LOCAL_PORTS: - port += 1 - LOG.debug(f"Port in use. Incrementing port number. port={port}") - - else: - if port_in_use(port): - proc = get_process_at_port(port) - if proc: - if is_ansys_process(proc): - raise PortAlreadyInUseByAnMAPDLInstance(port) - else: - raise PortAlreadyInUse(port) - - pymapdl._LOCAL_PORTS.append(port) - - if not nproc: - nproc = 2 - cpu_sw = "-np %d" % nproc if ram: @@ -541,31 +376,10 @@ def launch_grpc( port_sw = "-port %d" % port grpc_sw = "-grpc" - # remove any temporary error files at the run location. 
This is - # important because we need to know if MAPDL is already running - # here and because we're looking for any temporary files that are - # created to tell when the process has started - for filename in os.listdir(run_location): - if ".err" == filename[-4:] and jobname in filename: - if os.path.isfile(filename): - try: - os.remove(filename) - LOG.debug(f"Removing temporary error file: {filename}") - except: - raise IOError( - f"Unable to remove {filename}. There might be " - "an instance of MAPDL running at running at " - f'"{run_location}"' - ) - # Windows will spawn a new window, special treatment if os.name == "nt": - tmp_inp = ".__tmp__.inp" - with open(os.path.join(run_location, tmp_inp), "w") as f: - f.write("FINISH\r\n") - LOG.debug(f"Writing temporary input file: {tmp_inp} with 'FINISH' command.") - # must start in batch mode on windows to hide APDL window + tmp_inp = ".__tmp__.inp" command_parm = [ '"%s"' % exec_file, job_sw, @@ -597,17 +411,54 @@ def launch_grpc( ] # cleaning empty args. command = " ".join(command_parm) - LOG.debug(f"Starting MAPDL with command: {command}") + LOG.debug(f"Generated command: {command}") + return command + + +def launch_grpc( + cmd: str, + run_location: str = None, + env_vars: Optional[Dict[str, str]] = None, +) -> subprocess.Popen: + """Start MAPDL locally in gRPC mode. + + Parameters + ---------- + cmd: str + Command to use to launch the MAPDL instance. + + run_location : str, optional + MAPDL working directory. The default is the temporary working + directory. + + env_vars: dict, optional + Dictionary with the environment variables to inject in the process. + + Returns + ------- + subprocess.Popen + Process object + """ + if env_vars is None: + env_vars = {} - env_vars = update_env_vars(add_env_vars, replace_env_vars) + # disable all MAPDL pop-up errors: + env_vars.setdefault("ANS_CMD_NODIAG", "TRUE") LOG.info( - f"Running a local instance in {run_location} at port {port} the following command: '{command}'" + f"Running a local instance in {run_location} with the following command: '{cmd}'" ) + if os.name == "nt": + # getting tmp file name + tmp_inp = cmd.split()[cmd.split().index("-i") + 1] + with open(os.path.join(run_location, tmp_inp), "w") as f: + f.write("FINISH\r\n") + LOG.debug(f"Writing temporary input file: {tmp_inp} with 'FINISH' command.") + LOG.debug("MAPDL starting in background.") process = subprocess.Popen( - command, + cmd, shell=os.name != "nt", cwd=run_location, stdin=subprocess.DEVNULL, @@ -616,6 +467,39 @@ def launch_grpc( env=env_vars, ) + return process + + +def check_mapdl_launch( + process: subprocess.Popen, run_location: str, timeout: int, cmd: str +) -> None: + """Check MAPDL launching process. + + Check several things to confirm MAPDL has been launched: + + * MAPDL process: + Check process is alive still. + * File error: + Check if error file has been created. + * [On linux, but not WSL] Check if server is alive. + Read stdout looking for 'Server listening on' string. + + Parameters + ---------- + process : subprocess.Popen + MAPDL process object coming from 'launch_grpc' + run_location : str + MAPDL path. + timeout : int + Timeout + cmd : str + Command line used to launch MAPDL. Just for error printing. + + Raises + ------ + MapdlDidNotStart + MAPDL did not start. 
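The launch-and-verify flow documented above (spawn the MAPDL process with ``subprocess.Popen``, then confirm startup by polling the process, watching the run directory for an error file, and scanning stdout for the "Server listening on" marker) is a generic pattern. The standalone sketch below illustrates the same idea with standard-library calls only; the names ``start_and_wait`` and ``ServerDidNotStart`` are invented for this example, and it is not the PyMAPDL implementation, which reads stdout from a background thread through a queue so that a silent process cannot stall the timeout check.

import subprocess
import time


class ServerDidNotStart(RuntimeError):
    """Raised when the ready marker never shows up before the timeout."""


def start_and_wait(cmd, run_location, marker="Server listening on", timeout=45.0):
    # Spawn the server process in its working directory, merging stderr into stdout.
    process = subprocess.Popen(
        cmd,  # the server command as a list of arguments
        cwd=run_location,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
    )
    deadline = time.time() + timeout
    captured = []
    while time.time() < deadline:
        # A process that has already exited means the launch failed.
        if process.poll() is not None:
            raise ServerDidNotStart("".join(captured) or "Process exited early.")
        # Note: readline() blocks, so a completely silent process would hang here;
        # production code reads stdout from a separate thread instead.
        line = process.stdout.readline()
        captured.append(line)
        if marker in line:
            return process
    process.kill()
    raise ServerDidNotStart(f"Timed out after {timeout} s:\n" + "".join(captured))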
+ """ LOG.debug("Generating queue object for stdout") stdout_queue, _ = _create_queue_for_std(process.stdout) @@ -635,7 +519,7 @@ def launch_grpc( msg = ( str(e) + f"\nRun location: {run_location}" - + f"\nCommand line used: {command}\n\n" + + f"\nCommand line used: {cmd}\n\n" ) terminal_output = "\n".join(_get_std_output(std_queue=stdout_queue)).strip() @@ -644,10 +528,6 @@ def launch_grpc( raise MapdlDidNotStart(msg) from e - # Ending thread - # Todo: Ending queue thread - return port, run_location, process - def _check_process_is_alive(process, run_location): if process.poll() is not None: # pragma: no cover @@ -743,8 +623,8 @@ def enqueue_output(out, queue): def launch_remote_mapdl( - version=None, - cleanup_on_exit=True, + version: str = None, + cleanup_on_exit: bool = True, ) -> MapdlGrpc: """Start MAPDL remotely using the product instance management API. @@ -789,7 +669,7 @@ def launch_remote_mapdl( ) -def get_start_instance(start_instance: bool = True): +def get_start_instance(start_instance: Optional[Union[bool, str]] = None) -> bool: """Check if the environment variable ``PYMAPDL_START_INSTANCE`` exists and is valid. Parameters @@ -816,38 +696,41 @@ def get_start_instance(start_instance: bool = True): hence the argument ``start_instance`` is overwritten. """ - if "PYMAPDL_START_INSTANCE" in os.environ and os.environ["PYMAPDL_START_INSTANCE"]: - # It should not be empty - if isinstance(start_instance, bool): - warnings.warn( - "The environment variable 'PYMAPDL_START_INSTANCE' is set, " - "hence the argument 'start_instance' is overwritten." - ) - start_instance = os.environ["PYMAPDL_START_INSTANCE"] - else: - LOG.debug( - f"PYMAPDL_START_INSTANCE is unset. Using default value {start_instance}." - ) - if isinstance(start_instance, str): - start_instance = start_instance.lower().strip() - if start_instance not in ["true", "false"]: - raise OSError( - f'Invalid value "{start_instance}" for "start_instance" (or "PYMAPDL_START_INSTANCE"\n' - '"start_instance" should be either "TRUE" or "FALSE"' - ) + def valid_start_instance(start_instance: str) -> bool: + return start_instance.lower().strip() in ["true", "false"] - LOG.debug(f"PYMAPDL_START_INSTANCE is set to {start_instance}") - return start_instance == "true" + if start_instance and os.environ.get("PYMAPDL_START_INSTANCE"): + warnings.warn( + "The environment variable 'PYMAPDL_START_INSTANCE' is " + "ignored because 'start_instance' argument is given." + ) - elif isinstance(start_instance, bool): + if isinstance(start_instance, bool): return start_instance - elif start_instance is None: - LOG.debug( - "'PYMAPDL_START_INSTANCE' is unset, and there is no supplied value. Using default, which is 'True'." - ) - return True # Default is true + elif start_instance is None or isinstance(start_instance, str): + if start_instance is None: + if os.environ.get("PYMAPDL_START_INSTANCE"): + start_instance = os.environ.get("PYMAPDL_START_INSTANCE", "") + if not valid_start_instance(start_instance): + raise OSError( + f'Invalid value "{start_instance}" for "start_instance" (or "PYMAPDL_START_INSTANCE"\n' + '"start_instance" should be either "TRUE" or "FALSE"' + ) + else: + LOG.debug( + "'PYMAPDL_START_INSTANCE' is unset, and there is no supplied value. Using default, which is 'True'." + ) + return True # Default is true + + if not valid_start_instance(start_instance): + raise ValueError( + f"The value given for 'start_instance' ({start_instance}) is invalid." 
+ ) + + return start_instance.lower().strip() == "true" + else: raise ValueError("Only booleans are allowed as arguments.") @@ -963,7 +846,9 @@ def check_lock_file(path, jobname, override): ) -def _validate_MPI(add_sw, exec_path, force_intel=False): +def set_MPI_additional_switches( + add_sw: str, exec_path: str, force_intel: bool = False +) -> str: """Validate MPI configuration. Enforce Microsoft MPI in version 21.0 or later, to fix a @@ -989,11 +874,6 @@ def _validate_MPI(add_sw, exec_path, force_intel=False): # known issues with distributed memory parallel (DMP) if "smp" not in add_sw_lower_case: # pragma: no cover - # Ubuntu ANSYS fails to launch without I_MPI_SHM_LMT - if _is_ubuntu(): - LOG.debug("Ubuntu system detected. Adding 'I_MPI_SHM_LMT' env var.") - os.environ["I_MPI_SHM_LMT"] = "shm" - if _HAS_ATP: condition = ( os.name == "nt" @@ -1037,7 +917,16 @@ def _validate_MPI(add_sw, exec_path, force_intel=False): return add_sw -def _force_smp_student_version(add_sw, exec_path): +def configure_ubuntu(envvars: Dict[str, Any]): + # Ubuntu ANSYS fails to launch without I_MPI_SHM_LMT + if _is_ubuntu(): + LOG.debug("Ubuntu system detected. Adding 'I_MPI_SHM_LMT' env var.") + envvars["I_MPI_SHM_LMT"] = "shm" + + return envvars + + +def force_smp_in_student(add_sw, exec_path): """Force SMP in student version. Parameters @@ -1071,6 +960,7 @@ def launch_mapdl( exec_file: Optional[str] = None, run_location: Optional[str] = None, jobname: str = "file", + *, nproc: Optional[int] = None, ram: Optional[Union[int, str]] = None, mode: Optional[str] = None, @@ -1085,14 +975,14 @@ def launch_mapdl( clear_on_connect: bool = True, log_apdl: Optional[Union[bool, str]] = None, remove_temp_dir_on_exit: bool = False, - license_server_check: bool = True, + license_server_check: bool = False, license_type: Optional[bool] = None, print_com: bool = False, add_env_vars: Optional[Dict[str, str]] = None, replace_env_vars: Optional[Dict[str, str]] = None, version: Optional[Union[int, str]] = None, detect_slurm_config: bool = True, - **kwargs: Dict[str, Any], + **kwargs, ) -> Union[MapdlGrpc, "MapdlConsole"]: """Start MAPDL locally. @@ -1215,7 +1105,7 @@ def launch_mapdl( license_server_check : bool, optional Check if the license server is available if MAPDL fails to - start. Only available on ``mode='grpc'``. Defaults ``True``. + start. Only available on ``mode='grpc'``. Defaults ``False``. license_type : str, optional Enable license type selection. You can input a string for its @@ -1429,397 +1319,214 @@ def launch_mapdl( "ANSYSLMD_LICENSE_FILE":"1055@MYSERVER"} >>> mapdl = launch_mapdl(replace_env_vars=my_env_vars) """ - # By default - ON_SLURM = os.environ.get("PYMAPDL_ON_SLURM", None) - if ON_SLURM is None: - ON_SLURM = True - else: - # Unless the env var is false, it will be true. - ON_SLURM = not (ON_SLURM.lower() == "false") + ######################################## + # Processing arguments + # -------------------- + # + # packing arguments + args = pack_arguments(locals()) # packs args and kwargs - # Let's require the following env vars to exist to go into slurm mode. 
- ON_SLURM = ( - ON_SLURM - and bool(os.environ.get("SLURM_JOB_NAME", "")) - and bool(os.environ.get("SLURM_JOB_ID", "")) - ) + check_kwargs(args) # check if passing wrong arguments + + pre_check_args(args) - if detect_slurm_config and ON_SLURM: + # SLURM settings + if is_on_slurm(args): LOG.info("On Slurm mode.") # extracting parameters - exec_file, jobname, nproc, ram, additional_switches = _parse_slurm_options( - exec_file, - jobname, - nproc, - ram, - additional_switches, - **kwargs, - ) - # To avoid timeouts - license_server_check = False - start_timeout = 2 * start_timeout - ON_SLURM = True # Using this as main variable - else: - ON_SLURM = False + get_slurm_options(args, kwargs) - # These parameters are partially used for unit testing - set_no_abort = kwargs.pop("set_no_abort", True) + get_cpus(args) - # Extract arguments: - force_intel = kwargs.pop("force_intel", False) - use_vtk = kwargs.pop("use_vtk", None) - just_launch = kwargs.pop("just_launch", None) - on_pool = kwargs.pop("on_pool", False) - _debug_no_launch = kwargs.pop("_debug_no_launch", None) + get_start_instance_arg(args) - # Transferring MAPDL arguments to start_parameters: - start_parm = {} + get_ip(args) - kwargs_keys = list(kwargs.keys()) - for each_par in kwargs_keys: - if each_par in _ALLOWED_START_PARM: - start_parm[each_par] = kwargs.pop(each_par) + args["port"] = get_port(args["port"], args["start_instance"]) - # Raising error if using non-allowed arguments - if kwargs: - ms_ = ", ".join([f"'{each}'" for each in kwargs.keys()]) - raise ValueError(f"The following arguments are not recognized: {ms_}") + get_exec_file(args) - # Getting IP from env var - ip_env_var = os.environ.get("PYMAPDL_IP", "") - if ip_env_var != "": - if ip: - warnings.warn( - "The env var 'PYMAPDL_IP' is set, hence the 'ip' argument is overwritten." - ) + args["version"] = get_version(args["version"], exec_file) - ip = ip_env_var - LOG.debug(f"An IP ({ip}) has been set using 'PYMAPDL_IP' env var.") + if args["start_instance"]: + ######################################## + # Local adjustments + # ----------------- + # + # Only when starting MAPDL (aka Local) - ip = None if ip == "" else ip # Making sure the variable is not empty + get_run_location(args) - # Getting "start_instance" using "True" as default. - if (ip is not None) and (start_instance is None): - # An IP has been supplied. By default, 'start_instance' is equal - # false, unless it is set through the env vars. - start_instance = get_start_instance(start_instance=False) - else: - start_instance = get_start_instance(start_instance=start_instance) + # verify lock file does not exist + check_lock_file(args["run_location"], args["jobname"], args["override"]) - LOG.debug("Using 'start_instance' equal to %s.", start_instance) + # remove err file so we can track its creation + # (as way to check if MAPDL started or not) + remove_err_files(args["run_location"], args["jobname"]) - if ip is None: - if ON_WSL: - ip = _get_windows_host_ip() - if ip: - LOG.debug( - f"On WSL: Using the following IP address for the Windows OS host: {ip}" - ) - else: - raise MapdlDidNotStart( - "You seems to be working from WSL.\n" - "Unfortunately, PyMAPDL could not find the IP address of the Windows host machine." - ) + if _HAS_ATP and not args["_debug_no_launch"]: + version = version_from_path("mapdl", args["exec_file"]) + args["mode"] = check_mode(args["mode"], version) - if not ip: - LOG.debug( - f"No IP address was supplied. 
Using the default IP address: {LOCALHOST}" - ) - ip = LOCALHOST + if not args["mode"]: + args["mode"] = "grpc" - else: - LOG.debug( - "Because 'PYMAPDL_IP' is not None, an attempt is made to connect to" - " a remote session ('START_INSTANCE' is set to 'False')." - ) - if ON_WSL: - LOG.debug("On WSL: Allowing 'start_instance' and 'ip' arguments together.") - else: - if start_instance is True and not on_pool: - raise ValueError( - "When providing a value for the argument 'ip', the argument " - "'start_instance' cannot be 'True'.\n" - "Make sure the corresponding environment variables are not setting " - "those argument values.\n" - "For more information visit https://github.com/ansys/pymapdl/issues/2910" - ) + LOG.debug(f"Using mode {args['mode']}") - ip = socket.gethostbyname(ip) # Converting ip or hostname to ip + args["additional_switches"] = set_license_switch( + args["license_type"], args["additional_switches"] + ) - check_valid_ip(ip) # double check + env_vars = update_env_vars(args["add_env_vars"], args["replace_env_vars"]) - if port is None: - port = int(os.environ.get("PYMAPDL_PORT", MAPDL_DEFAULT_PORT)) - check_valid_port(port) - LOG.debug(f"Using default port {port}") + ######################################## + # Context specific launching adjustments + # -------------------------------------- + # + if args["start_instance"]: + env_vars = configure_ubuntu(env_vars) - # verify version - if exec_file and version: - raise ValueError("Cannot specify both ``exec_file`` and ``version``.") + # Set SMP by default if student version is used. + args["additional_switches"] = force_smp_in_student( + args["additional_switches"], args["exec_file"] + ) - if version is None: - version = os.getenv("PYMAPDL_MAPDL_VERSION", None) + # Set compatible MPI + args["additional_switches"] = set_MPI_additional_switches( + args["additional_switches"], + args["exec_file"], + force_intel=args["force_intel"], + ) - # Start MAPDL with PyPIM if the environment is configured for it - # and the user did not pass a directive on how to launch it. - if _HAS_PIM and exec_file is None and pypim.is_configured(): - LOG.info("Starting MAPDL remotely. The startup configuration will be ignored.") - if version: - version = str(version) - else: - version = None - - return launch_remote_mapdl(cleanup_on_exit=cleanup_on_exit, version=version) - - version = _verify_version(version) # return a int version or none - - if start_instance: - # special handling when building the gallery outside of CI. This - # creates an instance of mapdl the first time. - if pymapdl.BUILDING_GALLERY: # pragma: no cover - LOG.debug("Building gallery.") - # launch an instance of pymapdl if it does not already exist and - # we're allowed to start instances - if GALLERY_INSTANCE[0] is None: - LOG.debug("Loading first MAPDL instance for gallery building.") - GALLERY_INSTANCE[0] = "Loading..." - mapdl = launch_mapdl( - start_instance=True, - cleanup_on_exit=False, - loglevel=loglevel, - set_no_abort=set_no_abort, - **start_parm, - ) - GALLERY_INSTANCE[0] = {"ip": mapdl._ip, "port": mapdl._port} - return mapdl + LOG.debug(f"Using additional switches {args['additional_switches']}.") - # otherwise, connect to the existing gallery instance if available, but it needs to be fully loaded. - elif GALLERY_INSTANCE[0] != "Loading...": - LOG.debug( - "Connecting to an existing MAPDL instance for gallery building." 
- ) - mapdl = MapdlGrpc( - ip=GALLERY_INSTANCE[0]["ip"], - port=GALLERY_INSTANCE[0]["port"], - cleanup_on_exit=False, - loglevel=loglevel, - set_no_abort=set_no_abort, - use_vtk=use_vtk, - **start_parm, - ) - if clear_on_connect: - mapdl.clear() - return mapdl + start_parm = generate_start_parameters(args) - else: - LOG.debug("Bypassing Gallery building flag for the first time.") + if _HAS_PIM and exec_file is None and pypim.is_configured(): + # Start MAPDL with PyPIM if the environment is configured for it + # and the user did not pass a directive on how to launch it. + LOG.info("Starting MAPDL remotely. The startup configuration will be ignored.") - else: - LOG.debug("Connecting to an existing instance of MAPDL at %s:%s", ip, port) + return launch_remote_mapdl( + cleanup_on_exit=args["cleanup_on_exit"], version=args["version"] + ) - if just_launch: - print(f"There is an existing MAPDL instance at: {ip}:{port}") - return + # Early exit for debugging. + if args["_debug_no_launch"]: + # Early exit, just for testing + return args # type: ignore - if _debug_no_launch: - return pack_parameters(locals()) # type: ignore + if not args["start_instance"]: + ######################################## + # Remote launching + # ---------------- + # + LOG.debug( + f"Connecting to an existing instance of MAPDL at {args['ip']}:{args['port']}" + ) mapdl = MapdlGrpc( - ip=ip, - port=port, cleanup_on_exit=False, - loglevel=loglevel, - set_no_abort=set_no_abort, - use_vtk=use_vtk, - log_apdl=log_apdl, + loglevel=args["loglevel"], + set_no_abort=args["set_no_abort"], + use_vtk=args["use_vtk"], + log_apdl=args["log_apdl"], **start_parm, ) - if clear_on_connect: + if args["clear_on_connect"]: mapdl.clear() return mapdl - # verify executable - if exec_file is None: - exec_file = os.getenv("PYMAPDL_MAPDL_EXEC", None) - - if exec_file is None: - if not _HAS_ATP: - raise ModuleNotFoundError( - "If you don't have 'ansys-tools-path' library installed, you need to input the executable path ('exec_path')." - ) - - LOG.debug("Using default executable.") - # Load cached path - if _debug_no_launch: - exec_file = "" - else: - exec_file = get_ansys_path(version=version) - - if exec_file is None: - raise FileNotFoundError( - "Invalid exec_file path or cannot load cached " - "mapdl path. Enter one manually by specifying " - "exec_file=" - ) - else: # verify ansys exists at this location - if not os.path.isfile(exec_file): - raise FileNotFoundError( - f'Invalid MAPDL executable at "{exec_file}"\n' - "Enter one manually using exec_file=" - ) - - # verify run location - if run_location is None: - LOG.debug("Using default run location.") - temp_dir = tempfile.gettempdir() - run_location = os.path.join(temp_dir, "ansys_%s" % random_string(10)) - if not os.path.isdir(run_location): - try: - os.mkdir(run_location) - LOG.debug("Created run location at %s", run_location) - except: - raise MapdlRuntimeError( - "Unable to create the temporary working " - f'directory "{run_location}"\n' - "Please specify run_location=" - ) - else: - if not os.path.isdir(run_location): - raise FileNotFoundError(f'"{run_location}" is not a valid directory') - if remove_temp_dir_on_exit: - LOG.info("`run_location` set. 
Disabling the removal of temporary files.") - remove_temp_dir_on_exit = False - - LOG.debug("Using run location at %s", run_location) - - # verify no lock file and the mode is valid - check_lock_file(run_location, jobname, override) - - if _HAS_ATP and not _debug_no_launch: - mode = check_mode(mode, version_from_path("mapdl", exec_file)) - LOG.debug("Using mode %s", mode) - else: - mode = "grpc" - - # Setting SMP by default if student version is used. - additional_switches = _force_smp_student_version(additional_switches, exec_file) - + ######################################## + # Sphinx docs adjustments + # ----------------------- + # + # special handling when building the gallery outside of CI. This + # creates an instance of mapdl the first time. + if pymapdl.BUILDING_GALLERY: # pragma: no cover + return create_gallery_instances(args, start_parm) + + ######################################## + # Local launching + # --------------- # - additional_switches = _validate_MPI( - additional_switches, exec_file, force_intel=force_intel - ) - - additional_switches = _check_license_argument(license_type, additional_switches) - LOG.debug(f"Using additional switches {additional_switches}.") - - # Bypassing number of processors checks because VDI/VNC might have - # different number of processors than the cluster compute nodes. - if not ON_SLURM: - # Setting number of processors - machine_cores = psutil.cpu_count(logical=False) - - if not nproc: - # Some machines only have 1 core - nproc = machine_cores if machine_cores < 2 else 2 - else: - if machine_cores < int(nproc): - raise NotEnoughResources( - f"The machine has {machine_cores} cores. PyMAPDL is asking for {nproc} cores." - ) - - # Setting env vars - env_vars = update_env_vars(add_env_vars, replace_env_vars) - - start_parm.update( - { - "exec_file": exec_file, - "run_location": run_location, - "additional_switches": additional_switches, - "jobname": jobname, - "nproc": nproc, - "print_com": print_com, - } - ) - - if mode == "console": - start_parm["start_timeout"] = start_timeout - - else: - start_parm["ram"] = ram - start_parm["override"] = override - start_parm["timeout"] = start_timeout - - LOG.debug(f"Using start parameters {start_parm}") - # Check the license server - if license_server_check: + if args["license_server_check"]: LOG.debug("Checking license server.") - lic_check = LicenseChecker(timeout=start_timeout) + lic_check = LicenseChecker(timeout=args["start_timeout"]) lic_check.start() try: LOG.debug("Starting MAPDL") - if mode == "console": + if args["mode"] == "console": from ansys.mapdl.core.mapdl_console import MapdlConsole mapdl = MapdlConsole( - loglevel=loglevel, log_apdl=log_apdl, use_vtk=use_vtk, **start_parm + loglevel=args["loglevel"], + log_apdl=args["log_apdl"], + use_vtk=args["use_vtk"], + **start_parm, ) - elif mode == "grpc": - if _debug_no_launch: - # Early exit, just for testing - return pack_parameters(locals()) # type: ignore + elif args["mode"] == "grpc": - port, actual_run_location, process = launch_grpc( - port=port, - replace_env_vars=env_vars, - **start_parm, + cmd = generate_mapdl_launch_command( + exec_file=args["exec_file"], + jobname=args["jobname"], + nproc=args["nproc"], + ram=args["ram"], + port=args["port"], + additional_switches=args["additional_switches"], + ) + + process = launch_grpc( + cmd=cmd, run_location=args["run_location"], env_vars=env_vars ) - if just_launch: - out = [ip, port] + check_mapdl_launch( + process, args["run_location"], args["start_timeout"], cmd + ) + + if args["just_launch"]: 
+ out = [args["ip"], args["port"]] if hasattr(process, "pid"): out += [process.pid] return out mapdl = MapdlGrpc( - ip=ip, - port=port, - cleanup_on_exit=cleanup_on_exit, - loglevel=loglevel, - set_no_abort=set_no_abort, - remove_temp_dir_on_exit=remove_temp_dir_on_exit, - log_apdl=log_apdl, + cleanup_on_exit=args["cleanup_on_exit"], + loglevel=args["loglevel"], + set_no_abort=args["set_no_abort"], + remove_temp_dir_on_exit=args["remove_temp_dir_on_exit"], + log_apdl=args["log_apdl"], process=process, - use_vtk=use_vtk, + use_vtk=args["use_vtk"], **start_parm, ) - if run_location is None: - mapdl._path = actual_run_location # Setting launched property mapdl._launched = True + mapdl._env_vars = env_vars except Exception as exception: # Failed to launch for some reason. Check if failure was due # to the license check - if license_server_check: + if args["license_server_check"]: LOG.debug("Checking license server.") lic_check.check() raise exception # Stopping license checker - if license_server_check: + if args["license_server_check"]: LOG.debug("Stopping license server check.") lic_check.is_connected = True return mapdl -def check_mode(mode, version): +def check_mode(mode: ALLOWABLE_MODES, version: ALLOWABLE_VERSION_INT): """Check if the MAPDL server mode matches the allowable version If ``None``, the newest mode will be selected. @@ -1921,7 +1628,7 @@ def update_env_vars(add_env_vars: dict, replace_env_vars: dict) -> dict: return envvars -def _check_license_argument(license_type, additional_switches): +def set_license_switch(license_type, additional_switches): if isinstance(license_type, str): # In newer license server versions an invalid license name just get discarded and produces no effect or warning. # For example: @@ -1953,7 +1660,7 @@ def _check_license_argument(license_type, additional_switches): warn_text = ( f"The keyword argument 'license_type' value ('{license_type}') is not a recognized\n" "license name or has been deprecated.\n" - "Still PyMAPDL will try to use it but in older versions you might experience\n" + "Still PyMAPDL will try to use it but in older MAPDL versions you might experience\n" "problems connecting to the server.\n" f"Recognized license names: {' '.join(allow_lics)}" ) @@ -1975,7 +1682,7 @@ def _check_license_argument(license_type, additional_switches): warn_text = ( f"The additional switch product value ('-p {license_type}') is not a recognized\n" "license name or has been deprecated.\n" - "Still PyMAPDL will try to use it but in older versions you might experience\n" + "Still PyMAPDL will try to use it but in older MAPDL versions you might experience\n" "problems connecting to the server.\n" f"Recognized license names: {' '.join(allow_lics)}" ) @@ -1992,38 +1699,6 @@ def _check_license_argument(license_type, additional_switches): return additional_switches -def _verify_version(version): - """Verify the MAPDL version is valid.""" - if isinstance(version, float): - version = int(version * 10) - - if isinstance(version, str): - if version.lower().strip() == "latest": - return None # Default behaviour is latest - - elif version.upper().strip() in [ - str(each) for each in SUPPORTED_ANSYS_VERSIONS.keys() - ]: - version = int(version) - elif version.upper().strip() in [ - str(each / 10) for each in SUPPORTED_ANSYS_VERSIONS.keys() - ]: - version = int(float(version) * 10) - elif version.upper().strip() in SUPPORTED_ANSYS_VERSIONS.values(): - version = [ - key - for key, value in SUPPORTED_ANSYS_VERSIONS.items() - if value == version.upper().strip() - ][0] - - if 
version is not None and version not in SUPPORTED_ANSYS_VERSIONS.keys(): - raise ValueError( - f"MAPDL version must be one of the following: {list(SUPPORTED_ANSYS_VERSIONS.keys())}" - ) - - return version - - def _get_windows_host_ip(): output = _run_ip_route() if output: @@ -2052,21 +1727,17 @@ def _parse_ip_route(output): return match[0] -def _parse_slurm_options( - exec_file: Optional[str], - jobname: str, - nproc: Optional[int], - ram: Optional[Union[str, int]], - additional_switches: str, - **kwargs: Dict[str, Any], -): +def get_slurm_options( + args: Dict[str, Any], + kwargs: Dict[str, Any], +) -> Dict[str, Any]: def get_value( variable: str, kwargs: Dict[str, Any], default: Optional[Union[str, int, float]] = 1, astype: Optional[Callable[[Any], Any]] = int, ): - value_from_env_vars = os.environ.get(variable, None) + value_from_env_vars = os.environ.get(variable) value_from_kwargs = kwargs.pop(variable, None) value = value_from_kwargs or value_from_env_vars or default if astype and value: @@ -2102,8 +1773,8 @@ def get_value( LOG.info(f"SLURM_CPUS_ON_NODE: {SLURM_CPUS_ON_NODE}") SLURM_MEM_PER_NODE = get_value( - "SLURM_MEM_PER_NODE", kwargs, default=None, astype=None - ) + "SLURM_MEM_PER_NODE", kwargs, default="", astype=str + ).upper() LOG.info(f"SLURM_MEM_PER_NODE: {SLURM_MEM_PER_NODE}") SLURM_NODELIST = get_value( @@ -2111,124 +1782,586 @@ def get_value( ).lower() LOG.info(f"SLURM_NODELIST: {SLURM_NODELIST}") - if not exec_file: - exec_file = os.environ.get("PYMAPDL_MAPDL_EXEC", None) + if not args["exec_file"]: + args["exec_file"] = os.environ.get("PYMAPDL_MAPDL_EXEC") - if not exec_file: + if not args["exec_file"]: # We should probably make a way to find it. # We will use the module thing pass - LOG.info(f"Using MAPDL executable in: {exec_file}") + LOG.info(f"Using MAPDL executable in: {args['exec_file']}") - if not jobname: - jobname = os.environ.get("SLURM_JOB_NAME", "file") - LOG.info(f"Using jobname: {jobname}") + if not args["jobname"]: + args["jobname"] = os.environ.get("SLURM_JOB_NAME", "file") + LOG.info(f"Using jobname: {args['jobname']}") # Checking specific env var - if not nproc: - nproc = os.environ.get("PYMAPDL_NPROC", None) - if nproc: - nproc = int(nproc) - - if not nproc: + if not args["nproc"]: ## Attempt to calculate the appropriate number of cores: # Reference: https://stackoverflow.com/a/51141287/6650211 # I'm assuming the env var makes sense. # # - SLURM_CPUS_ON_NODE is a property of the cluster, not of the job. 
# - options = [ - # 4, # Fall back option - SLURM_CPUS_PER_TASK * SLURM_NTASKS, # (CPUs) - SLURM_NPROCS, # (CPUs) - # SLURM_NTASKS, # (tasks) Not necessary the number of CPUs, - # SLURM_NNODES * SLURM_TASKS_PER_NODE * SLURM_CPUS_PER_TASK, # (CPUs) - SLURM_CPUS_ON_NODE * SLURM_NNODES, # (cpus) - ] + options = max( + [ + # 4, # Fall back option + SLURM_CPUS_PER_TASK * SLURM_NTASKS, # (CPUs) + SLURM_NPROCS, # (CPUs) + # SLURM_NTASKS, # (tasks) Not necessary the number of CPUs, + # SLURM_NNODES * SLURM_TASKS_PER_NODE * SLURM_CPUS_PER_TASK, # (CPUs) + SLURM_CPUS_ON_NODE * SLURM_NNODES, # (cpus) + ] + ) LOG.info(f"On SLURM number of processors options {options}") - nproc = max(options) - LOG.info(f"Setting number of CPUs to: {nproc}") + args["nproc"] = int(os.environ.get("PYMAPDL_NPROC", options)) + + LOG.info(f"Setting number of CPUs to: {args['nproc']}") - if not ram: + if not args["ram"]: if SLURM_MEM_PER_NODE: # RAM argument is in MB, so we need to convert - - if SLURM_MEM_PER_NODE[-1] == "T": # tera - ram = int(SLURM_MEM_PER_NODE[:-1]) * (2**10) ** 2 - elif SLURM_MEM_PER_NODE[-1] == "G": # giga - ram = int(SLURM_MEM_PER_NODE[:-1]) * (2**10) ** 1 - elif SLURM_MEM_PER_NODE[-1].upper() == "k": # kilo - ram = int(SLURM_MEM_PER_NODE[:-1]) * (2**10) ** (-1) + units = None + if SLURM_MEM_PER_NODE[-1].isalpha(): + units = SLURM_MEM_PER_NODE[-1] + ram = SLURM_MEM_PER_NODE[:-1] + else: + units = None + ram = SLURM_MEM_PER_NODE + + if not units: + args["ram"] = int(ram) + elif units == "T": # tera + args["ram"] = int(ram) * (2**10) ** 2 + elif units == "G": # giga + args["ram"] = int(ram) * (2**10) ** 1 + elif units == "M": # mega + args["ram"] = int(ram) + elif units == "K": # kilo + args["ram"] = int(ram) * (2**10) ** (-1) else: # Mega - ram = int(SLURM_MEM_PER_NODE) + raise ValueError( + "The memory defined in 'SLURM_MEM_PER_NODE' env var(" + f"'{SLURM_MEM_PER_NODE}') is not valid." 
+ ) - LOG.info(f"Setting RAM to: {ram}") + LOG.info(f"Setting RAM to: {args['ram']}") # We use "-dis " (with space) to avoid collision with user variables such # as `-distro` or so - if "-dis " not in additional_switches and not additional_switches.endswith("-dis"): - additional_switches += " -dis" + if "-dis " not in args["additional_switches"] and not args[ + "additional_switches" + ].endswith("-dis"): + args["additional_switches"] += " -dis" - ## Getting the node list - machines = "" - # parsing nodes to list - if SLURM_NODELIST: - try: - p = subprocess.Popen( - ["scontrol", "show", "hostnames", f"{SLURM_NODELIST}"], - stderr=subprocess.PIPE, - stdout=subprocess.PIPE, - ) - stderr = p.stderr.read().decode() - stdout = p.stdout.read().decode() + # Finally set to avoid timeouts + args["license_server_check"] = False + args["start_timeout"] = 2 * args["start_timeout"] - if "Invalid hostlist" in stderr: - raise ValueError( - "The node list is invalid, or it could not be parsed.\n", - "Are you passing the nodes correctly?\n", - f"Nodes list: {SLURM_NODELIST}", + return args + + +def pack_arguments(locals_): + args = {} + for each in ALLOWABLE_LAUNCH_MAPDL_ARGS: + if each in locals_: + args[each] = locals_[each] + + args["kwargs"] = locals_["kwargs"] + args.update(locals_["kwargs"]) # attaching kwargs + + args["set_no_abort"] = locals_.get( + "set_no_abort", locals_["kwargs"].get("set_no_abort", None) + ) + args["force_intel"] = locals_.get( + "force_intel", locals_["kwargs"].get("force_intel", None) + ) + args["broadcast"] = locals_.get( + "broadcast", locals_["kwargs"].get("broadcast", None) + ) + args["use_vtk"] = locals_.get("use_vtk", locals_["kwargs"].get("use_vtk", None)) + args["just_launch"] = locals_.get( + "just_launch", locals_["kwargs"].get("just_launch", None) + ) + args["on_pool"] = locals_.get("on_pool", locals_["kwargs"].get("on_pool", None)) + args["_debug_no_launch"] = locals_.get( + "_debug_no_launch", locals_["kwargs"].get("_debug_no_launch", None) + ) + + return args + + +def is_on_slurm(args: Dict[str, Any]) -> bool: + + args["ON_SLURM"] = os.environ.get("PYMAPDL_ON_SLURM", "True") + + is_flag_false = args["ON_SLURM"].lower() == "false" + + # Let's require the following env vars to exist to go into slurm mode. + args["ON_SLURM"] = bool( + args["detect_slurm_config"] + and not is_flag_false # default is true + and os.environ.get("SLURM_JOB_NAME") + and os.environ.get("SLURM_JOB_ID") + ) + return args["ON_SLURM"] + + +def generate_start_parameters(args: Dict[str, Any]) -> Dict[str, Any]: + """Generate start parameters + + Generate a dict with the parameters for launching MAPDL. + + Parameters + ---------- + args : Dict[str, Any] + Args dictionary + + Returns + ------- + Dict[str, Any] + start_param dictionary + + Raises + ------ + ValueError + If there are keys in kwargs after inject them all allowed keys, it means + a non-allowed key was used. 
+ """ + # Transferring MAPDL arguments to start_parameters: + start_parm = {} + + for each_par in _ALLOWED_START_PARM: + if each_par in args: + start_parm[each_par] = args[each_par] + + if args["mode"] == "console": + start_parm["start_timeout"] = args["start_timeout"] + + else: + start_parm["ram"] = args["ram"] + start_parm["override"] = args["override"] + start_parm["timeout"] = args["start_timeout"] + + LOG.debug(f"Using start parameters {start_parm}") + return start_parm + + +def get_ip_env_var() -> str: + """Get IP from 'PYMAPDL_IP' env var""" + + # Getting IP from env var + ip_env_var = os.environ.get("PYMAPDL_IP", "") + + if ip_env_var != "": + LOG.debug(f"An IP ({ip_env_var}) has been set using 'PYMAPDL_IP' env var.") + return ip_env_var + + +def get_ip(args: Dict[str, Any]) -> None: + """Get IP from env var or arguments + + The environment variable value has priority over the argument. + + Parameters + ---------- + args : Dict[str, Any] + Arguments dict + + Raises + ------ + MapdlDidNotStart + Windows host IP could not be found. + ValueError + 'start_instance' and 'ip' arguments are incompatible. + """ + if args["ip"] in [None, ""]: + + args["ip"] = get_ip_env_var() + + if not args["ip"] and ON_WSL: + args["ip"] = _get_windows_host_ip() + if args["ip"]: + LOG.debug( + f"On WSL: Using the following IP address for the Windows OS host: {args['ip']}" + ) + else: + raise MapdlDidNotStart( + "You seems to be working from WSL.\n" + "Unfortunately, PyMAPDL could not find the IP address of the Windows host machine." ) - if stderr: - raise RuntimeError(stderr) - nodes = stdout.strip().splitlines() - machines = ":".join([f"{each_node}" for each_node in nodes]) + if not args["ip"]: + LOG.debug( + f"No IP address was supplied. Using the default IP address: {LOCALHOST}" + ) + args["ip"] = LOCALHOST + + # Converting ip or hostname to ip + args["ip"] = socket.gethostbyname(args["ip"]) + check_valid_ip(args["ip"]) # double check + + +def get_start_instance_arg(args: Dict[str, Any]) -> None: + """Get start instance argument + + Parameters + ---------- + args : Dict[str, Any] + Arguments dict + """ + ip_envar = get_ip_env_var() not in ["", None] + + if (args["ip"] not in [None, ""] or ip_envar) and (args["start_instance"] is None): + # An IP has been supplied. By default, 'start_instance' is equal + # false, unless it is set through the env vars (which has preference) + args["start_instance"] = False + + args["start_instance"] = get_start_instance(start_instance=args["start_instance"]) + LOG.debug(f"Using 'start_instance' equal to {args['start_instance']}") + - # The following code creates the cmd line bit for MAPDL. It seems it - # is not needed in slurm. - # machines = " -machines " + ":".join([ - # f"{each_node}:{SLURM_CPUS_ON_NODE}" for each_node in nodes - # ]) +def get_port(port: Optional[int] = None, start_instance: Optional[bool] = None) -> int: + """Get port argument. - # We do not need to inject the machines in MAPDL command line. - # additional_switches += machines - LOG.info(f"Using nodes configuration: {machines}") + Parameters + ---------- + port : Optional[int] + Port given as argument. 
+ + Returns + ------- + int + Port + """ + if port is None: + if os.environ.get("PYMAPDL_PORT"): + LOG.debug(f"Using port from 'PYMAPDL_PORT' env var: {port}") + return int(os.environ.get("PYMAPDL_PORT")) + + if not pymapdl._LOCAL_PORTS: + port = MAPDL_DEFAULT_PORT + LOG.debug(f"Using default port: {port}") + else: + port = max(pymapdl._LOCAL_PORTS) + 1 + LOG.debug(f"Using next available port: {port}") + + while (port_in_use(port) and start_instance) or port in pymapdl._LOCAL_PORTS: + port += 1 + LOG.debug(f"Port in use. Incrementing port number. port={port}") + + else: + if port_in_use(port): + proc = get_process_at_port(port) + if proc: + if is_ansys_process(proc): + raise PortAlreadyInUseByAnMAPDLInstance(port) + else: + raise PortAlreadyInUse(port) + + pymapdl._LOCAL_PORTS.append(port) + + check_valid_port(port) + LOG.debug(f"Using default port {port}") + + return port + + +def get_version( + version: Optional[Union[str, int]] = None, + exec_file: Optional[str] = None, +) -> Optional[int]: + """Get MAPDL version + + Parameters + ---------- + version : Optional[Union[str, int]], optional + Version argument, by default None + + Returns + ------- + Optional[int] + The version as XYZ or None. + + Raises + ------ + ValueError + MAPDL version must be one of the following + """ + if not version: + version = os.getenv("PYMAPDL_MAPDL_VERSION") + + if not version: + # Early exit + return + + if isinstance(version, float): + version = int(version * 10) + + if isinstance(version, str): + if version.lower().strip() == "latest": + return None # Default behaviour is latest + + elif version.upper().strip() in [str(each) for each in ALLOWABLE_VERSION_INT]: + version = int(version) + elif version.upper().strip() in [ + str(each / 10) for each in ALLOWABLE_VERSION_INT + ]: + version = int(float(version) * 10) + elif version.upper().strip() in SUPPORTED_ANSYS_VERSIONS.values(): + version = [ + key + for key, value in SUPPORTED_ANSYS_VERSIONS.items() + if value == version.upper().strip() + ][0] + + if version is not None and version not in ALLOWABLE_VERSION_INT: + raise ValueError( + f"MAPDL version must be one of the following: {list(ALLOWABLE_VERSION_INT)}" + ) + + return version # return a int version or none + + +def create_gallery_instances( + args: Dict[str, Any], start_parm: Dict[str, Any] +) -> MapdlGrpc: # pragma: no cover + """Create MAPDL instances for the documentation gallery built. + + This function is not tested with Pytest, but it is used during CICD docs + building. + + Parameters + ---------- + args : Dict[str, Any] + Arguments dict + start_parm : Dict[str, Any] + MAPDL start parameters + + Returns + ------- + MapdlGrpc + MAPDL instance + """ + LOG.debug("Building gallery.") + # launch an instance of pymapdl if it does not already exist and + # we're allowed to start instances + if GALLERY_INSTANCE[0] is None: + LOG.debug("Loading first MAPDL instance for gallery building.") + GALLERY_INSTANCE[0] = "Loading..." + mapdl = launch_mapdl( + start_instance=True, + cleanup_on_exit=False, + loglevel=args["loglevel"], + set_no_abort=args["set_no_abort"], + **start_parm, + ) + GALLERY_INSTANCE[0] = {"ip": mapdl._ip, "port": mapdl._port} + return mapdl + + # otherwise, connect to the existing gallery instance if available, but it needs to be fully loaded. 
+ else: + while not isinstance(GALLERY_INSTANCE[0], dict): + # Waiting for MAPDL instance to be ready + time.sleep(0.1) + + LOG.debug("Connecting to an existing MAPDL instance for gallery building.") + start_parm.pop("ip", None) + start_parm.pop("port", None) + mapdl = MapdlGrpc( + ip=GALLERY_INSTANCE[0]["ip"], + port=GALLERY_INSTANCE[0]["port"], + cleanup_on_exit=False, + loglevel=args["loglevel"], + set_no_abort=args["set_no_abort"], + use_vtk=args["use_vtk"], + **start_parm, + ) + if args["clear_on_connect"]: + mapdl.clear() + return mapdl - except Exception as e: + +def get_exec_file(args: Dict[str, Any]) -> None: + """Get exec file argument + + Parameters + ---------- + args : Dict[str, Any] + Arguments dictionary + + Raises + ------ + ModuleNotFoundError + 'ansys-tools-path' library could not be found + FileNotFoundError + Invalid exec_file path or cannot load cached MAPDL path. + FileNotFoundError + Invalid MAPDL executable + """ + + args["exec_file"] = os.getenv("PYMAPDL_MAPDL_EXEC", args.get("exec_file")) + + if not args["start_instance"] and args["exec_file"] is None: + # 'exec_file' is not needed if the instance is not going to be launch + args["exec_file"] = "" + return + + if args["exec_file"] is None: + if not _HAS_ATP: + raise ModuleNotFoundError( + "If you don't have 'ansys-tools-path' library installed, you need " + "to input the executable path ('exec_file' argument) or use the " + "'PYMAPDL_MAPDL_EXEC' environment variable." + ) + + if args.get("_debug_no_launch", False): + args["exec_file"] = "" + return + + LOG.debug("Using default executable.") + args["exec_file"] = get_ansys_path(version=args.get("version")) + + # Edge case + if args["exec_file"] is None: + raise FileNotFoundError( + "Invalid exec_file path or cannot load cached " + "MAPDL path. Enter one manually by specifying " + "'exec_file' argument." + ) + else: # verify ansys exists at this location + if not os.path.isfile(args["exec_file"]): + raise FileNotFoundError( + f'Invalid MAPDL executable at "{args["exec_file"]}"\n' + "Enter one manually using exec_file=" + ) + + +def get_run_location(args: Dict[str, Any]) -> None: + """Get run_location argument. + + It can change 'remove_temp_dir_on_exit' argument's value. + + Parameters + ---------- + args : Dict[str, Any] + Arguments dict + + Raises + ------ + FileNotFoundError + _description_ + """ + if args["run_location"] is None: + args["run_location"] = create_temp_dir() + LOG.debug( + f"Using default temporary directory for MAPDL run location: {args['run_location']}" + ) + + elif not os.path.isdir(args["run_location"]): + os.makedirs(args["run_location"], exist_ok=True) + LOG.debug(f"Creating directory for MAPDL run location: {args['run_location']}") + + if args.get("remove_temp_dir_on_exit"): LOG.info( - f"The machines list could not be obtained.\nThis error occurred:\n{str(e)}" + "The 'run_location' argument is set. Disabling the removal of temporary files." ) + args["remove_temp_dir_on_exit"] = False + + elif not os.access(args["run_location"], os.W_OK): + raise IOError(f'Unable to write to ``run_location``: {args["run_location"]}') + + LOG.debug("Using run location at %s", args["run_location"]) + + +def check_kwargs(args: Dict[str, Any]): + """Check all the kwargs are valid. 
+ + Parameters + ---------- + args : Dict[str, Any] + Arguments dict - return exec_file, jobname, nproc, ram, additional_switches - - -def pack_parameters(locals_var): - # pack all the arguments in a dict for debugging purposes - # We prefer to explicitly output the desired output - dict_ = {} - dict_["port"] = locals_var["port"] - dict_["ip"] = locals_var["ip"] - dict_["add_env_vars"] = locals_var["add_env_vars"] - dict_["replace_env_vars"] = locals_var["replace_env_vars"] - dict_["cleanup_on_exit"] = locals_var["cleanup_on_exit"] - dict_["loglevel"] = locals_var["loglevel"] - dict_["set_no_abort"] = locals_var["set_no_abort"] - dict_["remove_temp_dir_on_exit"] = locals_var["remove_temp_dir_on_exit"] - dict_["log_apdl"] = locals_var["log_apdl"] - dict_["use_vtk"] = locals_var["use_vtk"] - dict_["start_parm"] = locals_var["start_parm"] - dict_["start_instance"] = locals_var["start_instance"] - dict_["version"] = locals_var["version"] - dict_["additional_switches"] = locals_var["additional_switches"] - return dict_ + Raises + ------ + ValueError + When an argument is not allowed. + """ + kwargs = list(args["kwargs"].keys()) + + # Raising error if using non-allowed arguments + for each in kwargs.copy(): + if each in _ALLOWED_START_PARM or each in ALLOWABLE_LAUNCH_MAPDL_ARGS: + kwargs.remove(each) + + if kwargs: + ms_ = ", ".join([f"'{each}'" for each in args["kwargs"].keys()]) + raise ValueError(f"The following arguments are not recognized: {ms_}") + + +def pre_check_args(args): + if args["start_instance"] and args["ip"] and not args["on_pool"]: + raise ValueError( + "When providing a value for the argument 'ip', the argument " + "'start_instance' cannot be 'True'.\n" + "Make sure the corresponding environment variables are not setting " + "those argument values.\n" + "For more information visit https://github.com/ansys/pymapdl/issues/2910" + ) + + if args["exec_file"] and args["version"]: + raise ValueError("Cannot specify both ``exec_file`` and ``version``.") + + +def get_cpus(args: Dict[str, Any]): + """Get number of CPUs + + Parameters + ---------- + args : Dict[str, Any] + Arguments dict + + Raises + ------ + NotEnoughResources + When requesting more CPUs than available. + """ + + # Bypassing number of processors checks because VDI/VNC might have + # different number of processors than the cluster compute nodes. + if args["ON_SLURM"]: + return + + # Setting number of processors + machine_cores = psutil.cpu_count(logical=False) + + if not args["nproc"]: + # Some machines only have 1 core + args["nproc"] = machine_cores if machine_cores < 2 else 2 + else: + if machine_cores < int(args["nproc"]): + raise NotEnoughResources( + f"The machine has {machine_cores} cores. PyMAPDL is asking for {args['nproc']} cores." + ) + + +def remove_err_files(run_location, jobname): + # remove any temporary error files at the run location. This is + # important because we need to know if MAPDL is already running + # here and because we're looking for any temporary files that are + # created to tell when the process has started + for filename in os.listdir(run_location): + if ".err" == filename[-4:] and jobname in filename: + filename = os.path.join(run_location, filename) + if os.path.isfile(filename): + try: + os.remove(filename) + LOG.debug(f"Removing temporary error file: {filename}") + except Exception as error: + LOG.debug( + f"Unable to remove {filename}. 
There might be "
+                        "an instance of MAPDL running at "
+                        f'"{run_location}"'
+                    )
+                    raise error

diff --git a/src/ansys/mapdl/core/mapdl_core.py b/src/ansys/mapdl/core/mapdl_core.py
index c4e37ee54d..2bfffcad69 100644
--- a/src/ansys/mapdl/core/mapdl_core.py
+++ b/src/ansys/mapdl/core/mapdl_core.py
@@ -163,6 +163,23 @@
 GUI_FONT_SIZE = 15
 LOG_APDL_DEFAULT_FILE_NAME = "apdl.log"
 
+_ALLOWED_START_PARM = [
+    "additional_switches",
+    "check_parameter_names",
+    "exec_file",
+    "ip",
+    "jobname",
+    "nproc",
+    "override",
+    "port",
+    "print_com",
+    "process",
+    "ram",
+    "run_location",
+    "start_timeout",
+    "timeout",
+]
+
 
 def parse_to_short_cmd(command):
     """Takes any MAPDL command and returns the first 4 characters of
@@ -195,25 +212,6 @@ def setup_logger(loglevel="INFO", log_file=True, mapdl_instance=None):
     return setup_logger.log
 
 
-_ALLOWED_START_PARM = [
-    "additional_switches",
-    "exec_file",
-    "ip",
-    "jobname",
-    "local",
-    "nproc",
-    "override",
-    "port",
-    "print_com",
-    "process",
-    "ram",
-    "run_location",
-    "start_timeout",
-    "timeout",
-    "check_parameter_names",
-]
-
-
 def _sanitize_start_parm(start_parm):
     for each_key in start_parm:
         if each_key not in _ALLOWED_START_PARM:
@@ -263,7 +261,8 @@ def __init__(
         else:  # pragma: no cover
             if use_vtk:
                 raise ModuleNotFoundError(
-                    "Using the keyword argument 'use_vtk' requires having 'ansys-tools-visualization_interface' installed."
+                    "Using the keyword argument 'use_vtk' requires having "
+                    "'ansys-tools-visualization_interface' installed."
                 )
             else:
                 self._use_vtk = False
@@ -279,13 +278,14 @@ def __init__(
         self._krylov = None
         self._on_docker = None
         self._platform = None
+        self._path_cache = None  # Cache
+        self._print_com: bool = print_com  # print the command /COM input.
+
         # Start_parameters
         _sanitize_start_parm(start_parm)
         self._start_parm: Dict[str, Any] = start_parm
         self._jobname: str = start_parm.get("jobname", "file")
         self._path: Union[str, pathlib.Path] = start_parm.get("run_location", None)
-        self._path_cache = None  # Cache
-        self._print_com: bool = print_com  # print the command /COM input.
self.check_parameter_names = start_parm.get("check_parameter_names", True) # Setting up loggers diff --git a/src/ansys/mapdl/core/mapdl_grpc.py b/src/ansys/mapdl/core/mapdl_grpc.py index a83dfcc7c5..72f4b9e054 100644 --- a/src/ansys/mapdl/core/mapdl_grpc.py +++ b/src/ansys/mapdl/core/mapdl_grpc.py @@ -857,11 +857,25 @@ def _launch(self, start_parm, timeout=10): raise MapdlRuntimeError( "Can only launch the GUI with a local instance of MAPDL" ) - from ansys.mapdl.core.launcher import launch_grpc + from ansys.mapdl.core.launcher import generate_mapdl_launch_command, launch_grpc self._exited = False # reset exit state - port, directory, process = launch_grpc(**start_parm) - self._connect(port) + + args = self._start_parm + cmd = generate_mapdl_launch_command( + exec_file=args["exec_file"], + jobname=args["jobname"], + nproc=args["nproc"], + ram=args["ram"], + port=args["port"], + additional_switches=args["additional_switches"], + ) + + process = launch_grpc( + cmd=cmd, run_location=args["run_location"], env_vars=self._env_vars or None + ) + + self._connect(args["port"]) # may need to wait for viable connection in open_gui case tmax = time.time() + timeout @@ -877,6 +891,9 @@ def _launch(self, start_parm, timeout=10): if not success: raise MapdlConnectionError("Unable to reconnect to MAPDL") + # Update process + self._mapdl_process = process + @supress_logging def _set_no_abort(self): """Do not abort MAPDL.""" @@ -1054,6 +1071,7 @@ def exit(self, save=False, force=False, **kwargs): f"Exiting MAPLD gRPC instance {self.ip}:{self.port} on '{self._path}'." ) + mapdl_path = self.directory # caching if self._exited is None: self._log.debug("'self._exited' is none.") return # Some edge cases the class object is not completely initialized but the __del__ method @@ -1089,7 +1107,6 @@ def exit(self, save=False, force=False, **kwargs): if not kwargs.pop("fake_exit", False): # This cannot/should not be faked if self._local: - mapdl_path = self.directory self._cache_pids() # Recache processes if os.name == "nt": @@ -1106,7 +1123,7 @@ def exit(self, save=False, force=False, **kwargs): # No cover: The CI is working with a single MAPDL instance self._remote_instance.delete() - self._remove_temp_dir_on_exit() + self._remove_temp_dir_on_exit(mapdl_path) if self._local and self._port in _LOCAL_PORTS: _LOCAL_PORTS.remove(self._port) diff --git a/src/ansys/mapdl/core/misc.py b/src/ansys/mapdl/core/misc.py index f732f9712e..ffbd46ac1c 100644 --- a/src/ansys/mapdl/core/misc.py +++ b/src/ansys/mapdl/core/misc.py @@ -49,7 +49,7 @@ from ansys.mapdl import core as pymapdl from ansys.mapdl.core import _HAS_PYVISTA, LOG -from ansys.mapdl.core.errors import MapdlExitedError, MapdlRuntimeError +from ansys.mapdl.core.errors import MapdlExitedError try: import ansys.tools.report as pyansys_report @@ -544,13 +544,9 @@ def get_name(): # create dir: path = os.path.join(tmpdir, name) - try: - os.mkdir(path) - except: # pragma: no cover - raise MapdlRuntimeError( - "Unable to create temporary working " - f"directory {path}\nPlease specify 'run_location' argument" - ) + if not os.path.isdir(path): + os.makedirs(path) + LOG.debug(f"Created run location at {path}") return path diff --git a/src/ansys/mapdl/core/pool.py b/src/ansys/mapdl/core/pool.py index 114b11cf03..49b0d4903c 100755 --- a/src/ansys/mapdl/core/pool.py +++ b/src/ansys/mapdl/core/pool.py @@ -367,9 +367,9 @@ def __init__( [thread.join() for thread in threads] # make sure everything is ready - timeout = time.time() + timeout - - while timeout > time.time(): + 
n_instances_ready = 0 + time_end = time.time() + timeout + while time_end > time.time(): n_instances_ready = sum([each is not None for each in self._instances]) if n_instances_ready == n_instances: @@ -378,7 +378,7 @@ def __init__( time.sleep(0.1) else: raise TimeoutError( - f"Only {n_instances_ready} of {n_instances} could be started." + f"Only {n_instances_ready} of {n_instances} could be started after {timeout} seconds." ) if pbar is not None: @@ -910,6 +910,9 @@ def _spawn_mapdl( if not run_location: run_location = create_temp_dir(self._root_dir, name=name) + if self._spawn_kwargs.get("_debug_no_launch", False): + return + self._instances[index] = launch_mapdl( exec_file=exec_file, run_location=run_location, @@ -1050,7 +1053,9 @@ def _set_n_instance_ip_port_args(self, n_instances, ip, port): "Argument 'port' does not support this type of argument." ) else: - raise TypeError("Argument 'ip' does not support this type of argument.") + raise TypeError( + f"Argument 'ip' does not support this type of argument ({type(ip)})." + ) else: @@ -1079,7 +1084,7 @@ def _set_n_instance_ip_port_args(self, n_instances, ip, port): ports = port else: raise TypeError( - "Argument 'port' does not support this type of argument." + f"Argument 'port' does not support this type of argument ({type(port)})." ) elif isinstance(ip, str): diff --git a/tests/common.py b/tests/common.py index 91fa9b12b1..749da16324 100644 --- a/tests/common.py +++ b/tests/common.py @@ -44,15 +44,15 @@ # Set if on local def is_on_local(): - if "ON_LOCAL" in os.environ: - return os.environ.get("ON_LOCAL", "").lower() == "true" + if os.environ.get("ON_LOCAL"): + return os.environ.get("ON_LOCAL").lower() == "true" - if "ON_REMOTE" in os.environ: - return os.environ.get("ON_REMOTE", "").lower() == "true" + if os.environ.get("ON_REMOTE"): + return os.environ.get("ON_REMOTE").lower() == "true" - if os.environ.get("PYMAPDL_START_INSTANCE", None): + if os.environ.get("PYMAPDL_START_INSTANCE"): return ( - os.environ.get("PYMAPDL_START_INSTANCE", "").lower() != "false" + os.environ.get("PYMAPDL_START_INSTANCE").lower() != "false" ) # default is false from ansys.tools.path import find_mapdl @@ -72,7 +72,7 @@ def is_on_ci(): # Set if on ubuntu def is_on_ubuntu(): - envvar = os.environ.get("ON_UBUNTU", None) + envvar = os.environ.get("ON_UBUNTU") if envvar is not None: return envvar.lower() == "true" @@ -81,7 +81,7 @@ def is_on_ubuntu(): def has_grpc(): - envvar = os.environ.get("HAS_GRPC", None) + envvar = os.environ.get("HAS_GRPC") if envvar is not None: return envvar.lower().strip() == "true" @@ -104,7 +104,7 @@ def has_grpc(): def has_dpf(): - return bool(os.environ.get("DPF_PORT", "")) + return bool(os.environ.get("DPF_PORT")) def is_smp(): @@ -112,7 +112,7 @@ def is_smp(): def support_plotting(): - envvar = os.environ.get("SUPPORT_PLOTTING", None) + envvar = os.environ.get("SUPPORT_PLOTTING") if envvar is not None: return envvar.lower().strip() == "true" @@ -138,7 +138,7 @@ def testing_minimal(): def log_apdl() -> bool: - if "PYMAPDL_LOG_APDL" in os.environ and os.environ.get("PYMAPDL_LOG_APDL", ""): + if os.environ.get("PYMAPDL_LOG_APDL"): log_apdl = os.environ.get("PYMAPDL_LOG_APDL") if log_apdl.lower() in ["true", "false", "yes", "no"]: diff --git a/tests/conftest.py b/tests/conftest.py index 6c812d8fff..d531594fdd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -121,7 +121,7 @@ skip_if_running_student_version = pytest.mark.skipif( ON_STUDENT, - reason="This tests does not work on student version. 
Maybe because license limitations", + reason="This tests does not work on student version.", ) @@ -263,9 +263,12 @@ def requires_dependency(dependency: str): {os_msg} -If you do have Ansys installed, you may have to patch pymapdl to -automatically find your Ansys installation. Email the developer at: -alexander.kaszynski@ansys.com +If you do have Ansys installed, you may have to patch PyMAPDL to +automatically find your Ansys installation. + +You can request assistance by opening an issue on: + +https://github.com/ansys/pymapdl/issues """ MAPDL_VERSION = None # this is cached by mapdl fixture and used in the minimal testing @@ -301,7 +304,7 @@ def pytest_report_header(config, start_path, startdir): "DPF_START_SERVER", "IGNORE_POOL", ]: - env_var_value = os.environ.get(env_var, None) + env_var_value = os.environ.get(env_var) if env_var_value is not None: line += f"{env_var} ('{env_var_value}'), " text += [line] @@ -451,6 +454,7 @@ def run_before_and_after_tests( yield # this is where the testing happens assert prev == mapdl.is_local + assert not mapdl.exited make_sure_not_instances_are_left_open() @@ -500,6 +504,7 @@ def is_exited(mapdl: Mapdl): # Restoring the local configuration mapdl._local = local_ + mapdl._exited = False return mapdl @@ -632,6 +637,7 @@ def mapdl(request, tmpdir_factory): ########################################################################### if START_INSTANCE: mapdl._local = True + mapdl._exited = False mapdl.exit(save=True, force=True) assert mapdl._exited assert "MAPDL exited" in str(mapdl) diff --git a/tests/test_launcher.py b/tests/test_launcher.py index 86e6dc8b54..2849aed0ab 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -23,7 +23,9 @@ """Test the mapdl launcher""" import os +import subprocess import tempfile +from unittest.mock import patch import warnings import psutil @@ -31,21 +33,28 @@ from ansys.mapdl import core as pymapdl from ansys.mapdl.core.errors import ( - LicenseServerConnectionError, NotEnoughResources, PortAlreadyInUseByAnMAPDLInstance, ) from ansys.mapdl.core.launcher import ( + _HAS_ATP, LOCALHOST, - _check_license_argument, - _force_smp_student_version, _is_ubuntu, _parse_ip_route, - _parse_slurm_options, - _validate_MPI, - _verify_version, + force_smp_in_student, + generate_mapdl_launch_command, + generate_start_parameters, + get_exec_file, + get_run_location, + get_slurm_options, get_start_instance, + get_version, + is_on_slurm, + launch_grpc, launch_mapdl, + remove_err_files, + set_license_switch, + set_MPI_additional_switches, update_env_vars, ) from ansys.mapdl.core.licensing import LICENSES @@ -100,13 +109,13 @@ def test_validate_sw(): # ensure that windows adds msmpi # fake windows path exec_path = "C:/Program Files/ANSYS Inc/v211/ansys/bin/win64/ANSYS211.exe" - add_sw = _validate_MPI("", exec_path) + add_sw = set_MPI_additional_switches("", exec_path) assert "msmpi" in add_sw - add_sw = _validate_MPI("-mpi intelmpi", exec_path) + add_sw = set_MPI_additional_switches("-mpi intelmpi", exec_path) assert "msmpi" in add_sw and "intelmpi" not in add_sw - add_sw = _validate_MPI("-mpi INTELMPI", exec_path) + add_sw = set_MPI_additional_switches("-mpi INTELMPI", exec_path) assert "msmpi" in add_sw and "INTELMPI" not in add_sw @@ -180,9 +189,13 @@ def test_launch_console(version): @requires("local") @requires("nostudent") +@requires("ansys-tools-path") @pytest.mark.parametrize("license_name", LICENSES) -def test_license_type_keyword_names(mapdl, license_name): - args = launch_mapdl(license_type=license_name, 
_debug_no_launch=True) +def test_license_type_keyword_names(mapdl, monkeypatch, license_name): + exec_file = find_ansys()[0] + args = launch_mapdl( + exec_file=exec_file, license_type=license_name, _debug_no_launch=True + ) assert f"-p {license_name}" in args["additional_switches"] @@ -197,14 +210,19 @@ def test_license_type_additional_switch(mapdl, license_name): @requires("ansys-tools-path") -@requires("local") def test_license_type_dummy(mapdl): dummy_license_type = "dummy" - with pytest.raises(LicenseServerConnectionError): + with pytest.warns( + UserWarning, + match="Still PyMAPDL will try to use it but in older MAPDL versions you might experience", + ): launch_mapdl( + start_instance=False, port=mapdl.port + 1, - additional_switches=f" -p {dummy_license_type}" + QUICK_LAUNCH_SWITCHES, + additional_switches=f" -p {dummy_license_type} " + QUICK_LAUNCH_SWITCHES, start_timeout=start_timeout, + license_server_check=False, + _debug_no_launch=True, ) @@ -297,32 +315,32 @@ def test_open_gui( mapdl.open_gui(inplace=inplace, include_result=include_result) -def test__force_smp_student_version(): +def test_force_smp_in_student(): add_sw = "" exec_path = ( r"C:\Program Files\ANSYS Inc\ANSYS Student\v222\ansys\bin\winx64\ANSYS222.exe" ) - assert "-smp" in _force_smp_student_version(add_sw, exec_path) + assert "-smp" in force_smp_in_student(add_sw, exec_path) add_sw = "-mpi" exec_path = ( r"C:\Program Files\ANSYS Inc\ANSYS Student\v222\ansys\bin\winx64\ANSYS222.exe" ) - assert "-smp" not in _force_smp_student_version(add_sw, exec_path) + assert "-smp" not in force_smp_in_student(add_sw, exec_path) add_sw = "-dmp" exec_path = ( r"C:\Program Files\ANSYS Inc\ANSYS Student\v222\ansys\bin\winx64\ANSYS222.exe" ) - assert "-smp" not in _force_smp_student_version(add_sw, exec_path) + assert "-smp" not in force_smp_in_student(add_sw, exec_path) add_sw = "" exec_path = r"C:\Program Files\ANSYS Inc\v222\ansys\bin\winx64\ANSYS222.exe" - assert "-smp" not in _force_smp_student_version(add_sw, exec_path) + assert "-smp" not in force_smp_in_student(add_sw, exec_path) add_sw = "-SMP" exec_path = r"C:\Program Files\ANSYS Inc\v222\ansys\bin\winx64\ANSYS222.exe" - assert "-SMP" in _force_smp_student_version(add_sw, exec_path) + assert "-SMP" in force_smp_in_student(add_sw, exec_path) @pytest.mark.parametrize( @@ -330,19 +348,19 @@ def test__force_smp_student_version(): [[each_key, each_value] for each_key, each_value in LICENSES.items()], ) def test_license_product_argument(license_short, license_name): - additional_switches = _check_license_argument(license_name, "qwer") + additional_switches = set_license_switch(license_name, "qwer") assert f"qwer -p {license_short}" in additional_switches @pytest.mark.parametrize("unvalid_type", [1, {}, ()]) def test_license_product_argument_type_error(unvalid_type): with pytest.raises(TypeError): - _check_license_argument(unvalid_type, "") + set_license_switch(unvalid_type, "") def test_license_product_argument_warning(): with pytest.warns(UserWarning): - assert "-p asdf" in _check_license_argument("asdf", "qwer") + assert "-p asdf" in set_license_switch("asdf", "qwer") @pytest.mark.parametrize( @@ -350,33 +368,37 @@ def test_license_product_argument_warning(): [[each_key, each_value] for each_key, each_value in LICENSES.items()], ) def test_license_product_argument_p_arg(license_short, license_name): - assert f"qw1234 -p {license_short}" == _check_license_argument( + assert f"qw1234 -p {license_short}" == set_license_switch( None, f"qw1234 -p {license_short}" ) def 
test_license_product_argument_p_arg_warning(): with pytest.warns(UserWarning): - assert "qwer -p asdf" in _check_license_argument(None, "qwer -p asdf") + assert "qwer -p asdf" in set_license_switch(None, "qwer -p asdf") installed_mapdl_versions = [] -installed_mapdl_versions.extend(list(versions.keys())) -installed_mapdl_versions.extend([each / 10 for each in versions.keys()]) +installed_mapdl_versions.extend([int(each) for each in list(versions.keys())]) +installed_mapdl_versions.extend([float(each / 10) for each in versions.keys()]) installed_mapdl_versions.extend([str(each) for each in list(versions.keys())]) installed_mapdl_versions.extend([str(each / 10) for each in versions.keys()]) installed_mapdl_versions.extend(list(versions.values())) +installed_mapdl_versions.extend([None]) @pytest.mark.parametrize("version", installed_mapdl_versions) def test__verify_version_pass(version): - ver = _verify_version(version) - assert isinstance(ver, int) - assert min(versions.keys()) <= ver <= max(versions.keys()) + ver = get_version(version) + if version: + assert isinstance(ver, int) + assert min(versions.keys()) <= ver <= max(versions.keys()) + else: + assert ver is None def test__verify_version_latest(): - assert _verify_version("latest") is None + assert get_version("latest") is None @requires("ansys-tools-path") @@ -414,13 +436,7 @@ def test_version(mapdl): @requires("local") def test_raise_exec_path_and_version_launcher(mapdl): with pytest.raises(ValueError): - launch_mapdl( - exec_file="asdf", - port=mapdl.port + 1, - version="asdf", - start_timeout=start_timeout, - additional_switches=QUICK_LAUNCH_SWITCHES, - ) + get_version("asdf", "asdf") @requires("linux") @@ -472,7 +488,8 @@ def test_launched(mapdl): @requires("local") -def test_launching_on_busy_port(mapdl): +def test_launching_on_busy_port(mapdl, monkeypatch): + monkeypatch.delenv("PYMAPDL_PORT", raising=False) with pytest.raises(PortAlreadyInUseByAnMAPDLInstance): launch_mapdl(port=mapdl.port) @@ -622,25 +639,102 @@ def test_fail_channel_ip(): ), indirect=["set_env_var_context"], ) -def test__parse_slurm_options(set_env_var_context, validation): +def test_get_slurm_options(set_env_var_context, validation): """test slurm env vars""" for each_key, each_value in set_env_var_context.items(): if each_value: assert os.environ.get(each_key) == str(each_value) - exec_file, jobname, nproc, ram, additional_switches = _parse_slurm_options( - exec_file=None, jobname="", nproc=None, ram=None, additional_switches="" - ) - assert nproc == validation["nproc"] + args = { + "exec_file": None, + "jobname": "", + "nproc": None, + "ram": None, + "additional_switches": "", + "start_timeout": 45, + } + kwargs = {} + get_slurm_options(args, kwargs) + assert args["nproc"] == validation["nproc"] + + if args["ram"]: + assert args["ram"] == validation["ram"] + + if args["jobname"] != "file": + assert args["jobname"] == validation["jobname"] + + if args["exec_file"] and validation.get("exec_file", None): + assert args["exec_file"] == validation["exec_file"] + + +@pytest.mark.parametrize( + "ram,expected,context", + [ + ["2048k", 2, NullContext()], + ["10M", 10, NullContext()], + ["100G", 100 * 1024, NullContext()], + ["1T", 1024**2, NullContext()], + ["100", 100, NullContext()], + [ + "100E", + "", + pytest.raises( + ValueError, match="The memory defined in 'SLURM_MEM_PER_NODE' env var" + ), + ], + ], +) +def test_slurm_ram(monkeypatch, ram, expected, context): + monkeypatch.setenv("SLURM_MEM_PER_NODE", ram) + monkeypatch.setenv("PYMAPDL_MAPDL_EXEC", 
"asdf/qwer/poiu") + + args = { + "exec_file": None, + "jobname": "", + "ram": None, + "nproc": None, + "additional_switches": "", + "start_timeout": 45, + } + with context: + args = get_slurm_options(args, {}) + assert args["ram"] == expected + + +@pytest.mark.parametrize("slurm_env_var", ["True", "false", ""]) +@pytest.mark.parametrize("slurm_job_name", ["True", "false", ""]) +@pytest.mark.parametrize("slurm_job_id", ["True", "false", ""]) +@pytest.mark.parametrize("detect_slurm_config", [True, False, None]) +def test_is_on_slurm( + monkeypatch, slurm_env_var, slurm_job_name, slurm_job_id, detect_slurm_config +): + monkeypatch.setenv("PYMAPDL_ON_SLURM", slurm_env_var) + monkeypatch.setenv("SLURM_JOB_NAME", slurm_job_name) + monkeypatch.setenv("SLURM_JOB_ID", slurm_job_id) + + flag = is_on_slurm(args={"detect_slurm_config": detect_slurm_config}) - if ram: - assert ram == validation["ram"] + if detect_slurm_config is not True: + assert not flag - if jobname != "file": - assert jobname == validation["jobname"] + else: + if slurm_env_var.lower() == "false": + assert not flag - if exec_file and validation.get("exec_file", None): - assert exec_file == validation["exec_file"] + else: + if slurm_job_name != "" and slurm_job_id != "": + assert flag + else: + assert not flag + + if ON_LOCAL: + assert ( + launch_mapdl( + detect_slurm_config=detect_slurm_config, + _debug_no_launch=True, + )["ON_SLURM"] + == flag + ) @pytest.mark.parametrize( @@ -650,7 +744,7 @@ def test__parse_slurm_options(set_env_var_context, validation): pytest.param(False, NullContext(), id="Boolean false"), pytest.param("true", NullContext(), id="String true"), pytest.param("TRue", NullContext(), id="String true weird capitalization"), - pytest.param("2", pytest.raises(OSError), id="String number"), + pytest.param("2", pytest.raises(ValueError), id="String number"), pytest.param(2, pytest.raises(ValueError), id="Int"), ], ) @@ -673,23 +767,27 @@ def test_get_start_instance_argument(monkeypatch, start_instance, context): pytest.param("FaLSE", NullContext()), pytest.param("asdf", pytest.raises(OSError)), pytest.param("1", pytest.raises(OSError)), - pytest.param("", pytest.raises(OSError)), + pytest.param("", NullContext()), ], ) def test_get_start_instance_envvar(monkeypatch, start_instance, context): monkeypatch.setenv("PYMAPDL_START_INSTANCE", start_instance) with context: - if "true" in start_instance.lower(): - assert get_start_instance(start_instance) + if "true" in start_instance.lower() or start_instance == "": + assert get_start_instance(start_instance=None) else: - assert not get_start_instance(start_instance) + assert not get_start_instance(start_instance=None) +@requires("local") +@requires("ansys-tools-path") @pytest.mark.parametrize("start_instance", [True, False]) def test_launcher_start_instance(monkeypatch, start_instance): if "PYMAPDL_START_INSTANCE" in os.environ: monkeypatch.delenv("PYMAPDL_START_INSTANCE") - options = launch_mapdl(start_instance=start_instance, _debug_no_launch=True) + options = launch_mapdl( + exec_file=find_ansys()[0], start_instance=start_instance, _debug_no_launch=True + ) assert start_instance == options["start_instance"] @@ -700,73 +798,317 @@ def test_launcher_start_instance(monkeypatch, start_instance): def test_ip_and_start_instance( monkeypatch, start_instance, start_instance_envvar, ip, ip_envvar ): - # start_instance=False - # start_instance_envvar=True - # ip="" - # ip_envvar="123.1.1.1" - # For more information, visit https://github.com/ansys/pymapdl/issues/2910 + + 
################### + # Removing env var coming from CICD. if "PYMAPDL_START_INSTANCE" in os.environ: monkeypatch.delenv("PYMAPDL_START_INSTANCE") + ################### + # Injecting env vars for the test if start_instance_envvar is not None: monkeypatch.setenv("PYMAPDL_START_INSTANCE", str(start_instance_envvar)) if ip_envvar is not None: monkeypatch.setenv("PYMAPDL_IP", str(ip_envvar)) - start_instance_is_true = start_instance_envvar is True or ( - start_instance_envvar is None and (start_instance is True) - ) - - ip_is_true = bool(ip_envvar) or ( - (ip_envvar is None or ip_envvar == "") and bool(ip) - ) - - exceptions = start_instance_envvar is None and start_instance is None and ip_is_true + # Skip if PyMAPDL cannot detect where MAPDL is installed. + if not _HAS_ATP and not os.environ.get("PYMAPDL_MAPDL_EXEC"): + # if start_instance and not ip: + with pytest.raises( + ModuleNotFoundError, + match="If you don't have 'ansys-tools-path' library installed, you need", + ): + options = launch_mapdl( + exec_file=None, + start_instance=start_instance, + ip=ip, + _debug_no_launch=True, + ) + return # Exit early the test - if (start_instance_is_true and ip_is_true) and not exceptions: + ################### + # Exception case: start_instance and ip are passed as args. + if start_instance and ip: with pytest.raises( ValueError, match="When providing a value for the argument 'ip', the argument ", ): options = launch_mapdl( - start_instance=start_instance, ip=ip, _debug_no_launch=True + start_instance=start_instance, + ip=ip, + _debug_no_launch=True, ) + return # Exit early the test + + ################### + # Faking MAPDL launching and returning args + with warnings.catch_warnings(): + options = launch_mapdl( + start_instance=start_instance, + ip=ip, + _debug_no_launch=True, + ) - return # Exit + ################### + # Checking logic + # The start instance arg has precedence over the env var - if ( - isinstance(start_instance_envvar, bool) and isinstance(start_instance, bool) - ) or (ip_envvar and ip): - with pytest.warns(UserWarning): - options = launch_mapdl( - start_instance=start_instance, ip=ip, _debug_no_launch=True - ) + if start_instance is True: + assert options["start_instance"] + elif start_instance is False: + assert not options["start_instance"] else: - with warnings.catch_warnings(): - options = launch_mapdl( - start_instance=start_instance, ip=ip, _debug_no_launch=True - ) + # start_instance is None, checking env var: + if ip or ip_envvar: + assert options["start_instance"] is False - if start_instance_envvar is True: - assert options["start_instance"] is True - elif start_instance_envvar is False: - assert options["start_instance"] is False - else: - if start_instance is None: - if ip_envvar or bool(ip): + elif start_instance_envvar is True: + assert options["start_instance"] is True + + elif start_instance_envvar is False: + assert options["start_instance"] is False + + else: + # start_instance is None. 
+ # No IP env var or arg: + if ip: + # the ip is given either using the env var or the arg: assert not options["start_instance"] else: assert options["start_instance"] - elif start_instance is True: - assert options["start_instance"] - else: - assert not options["start_instance"] if ip_envvar: + # Getting IP from env var assert options["ip"] == ip_envvar else: + # From argument if ip: assert options["ip"] == ip else: - assert options["ip"] in (LOCALHOST, "0.0.0.0") + # Using default + assert options["ip"] in (LOCALHOST, "0.0.0.0", "127.0.0.1") + + +def mycpucount(**kwargs): + return 10 # faking 10 cores + + +@patch("os.name", "nt") +@patch("psutil.cpu_count", mycpucount) +def test_generate_mapdl_launch_command_windows(): + assert os.name == "nt" # Checking mocking is properly done + + exec_file = "C:/Program Files/ANSYS Inc/v242/ansys/bin/winx64/ANSYS242.exe" + jobname = "myjob" + nproc = 10 + port = 1000 + ram = 2 + additional_switches = "-my_add=switch" + + cmd = generate_mapdl_launch_command( + exec_file=exec_file, + jobname=jobname, + nproc=nproc, + port=port, + ram=ram, + additional_switches=additional_switches, + ) + + assert f'"{exec_file}" ' in cmd + assert f" -j {jobname} " in cmd + assert f" -port {port} " in cmd + assert f" -m {ram*1024} " in cmd + assert f" -np {nproc} " in cmd + assert " -grpc" in cmd + assert f" {additional_switches} " in cmd + assert f" -b -i .__tmp__.inp " in cmd + assert f" -o .__tmp__.out " in cmd + + +def test_generate_mapdl_launch_command_linux(): + assert os.name != "nt" # Checking mocking is properly done + + exec_file = "/ansys_inc/v242/ansys/bin/ansys242" + jobname = "myjob" + nproc = 10 + port = 1000 + ram = 2 + additional_switches = "-my_add=switch" + + cmd = generate_mapdl_launch_command( + exec_file=exec_file, + jobname=jobname, + nproc=nproc, + port=port, + ram=ram, + additional_switches=additional_switches, + ) + + assert f'"{exec_file}" ' in cmd + assert f" -j {jobname} " in cmd + assert f" -port {port} " in cmd + assert f" -m {ram*1024} " in cmd + assert f" -np {nproc} " in cmd + assert " -grpc" in cmd + assert f" {additional_switches} " in cmd + + assert f" -i .__tmp__.inp " not in cmd + assert f" -o .__tmp__.out " not in cmd + + +def test_generate_start_parameters_console(): + args = {"mode": "console", "start_timeout": 90} + + new_args = generate_start_parameters(args) + assert "start_timeout" in new_args + assert "ram" not in new_args + assert "override" not in new_args + assert "timeout" not in new_args + + +@patch("ansys.mapdl.core.launcher._HAS_ATP", False) +def test_get_exec_file(monkeypatch): + monkeypatch.delenv("PYMAPDL_MAPDL_EXEC", False) + + args = {"exec_file": None, "start_instance": True} + + with pytest.raises(ModuleNotFoundError): + get_exec_file(args) + + +def test_get_exec_file_not_found(monkeypatch): + monkeypatch.delenv("PYMAPDL_MAPDL_EXEC", False) + + args = {"exec_file": "my/fake/path", "start_instance": True} + + with pytest.raises(FileNotFoundError): + get_exec_file(args) + + +def _get_application_path(*args, **kwargs): + return None + + +@requires("ansys-tools-path") +@patch("ansys.tools.path.path._get_application_path", _get_application_path) +def test_get_exec_file_not_found_two(monkeypatch): + monkeypatch.delenv("PYMAPDL_MAPDL_EXEC", False) + args = {"exec_file": None, "start_instance": True} + with pytest.raises( + FileNotFoundError, match="Invalid exec_file path or cannot load cached " + ): + get_exec_file(args) + + +@pytest.mark.parametrize("run_location", [None, True]) 
+@pytest.mark.parametrize("remove_temp_dir_on_exit", [None, False, True]) +def test_get_run_location(tmpdir, remove_temp_dir_on_exit, run_location): + if run_location: + new_path = os.path.join(str(tmpdir), "my_new_path") + assert not os.path.exists(new_path) + else: + new_path = None + + args = { + "run_location": new_path, + "remove_temp_dir_on_exit": remove_temp_dir_on_exit, + } + + get_run_location(args) + + assert os.path.exists(args["run_location"]) + + assert "remove_temp_dir_on_exit" in args + + if run_location: + assert not args["remove_temp_dir_on_exit"] + elif remove_temp_dir_on_exit: + assert args["remove_temp_dir_on_exit"] + else: + assert not args["remove_temp_dir_on_exit"] + + +def fake_os_access(*args, **kwargs): + return False + + +@patch("os.access", lambda *args, **kwargs: False) +def test_get_run_location_no_access(tmpdir): + with pytest.raises(IOError, match="Unable to write to ``run_location``:"): + get_run_location({"run_location": str(tmpdir)}) + + +@pytest.mark.parametrize( + "args,match", + [ + [ + {"start_instance": True, "ip": True, "on_pool": False}, + "When providing a value for the argument 'ip', the argument", + ], + [ + {"exec_file": True, "version": True}, + "Cannot specify both ``exec_file`` and ``version``.", + ], + ], +) +def test_pre_check_args(args, match): + with pytest.raises(ValueError, match=match): + launch_mapdl(**args) + + +def test_remove_err_files(tmpdir): + run_location = str(tmpdir) + jobname = "jobname" + err_file = os.path.join(run_location, f"{jobname}.err") + with open(err_file, "w") as fid: + fid.write("Dummy") + + assert os.path.isfile(err_file) + remove_err_files(run_location, jobname) + assert not os.path.isfile(err_file) + + +def myosremove(*args, **kwargs): + raise IOError("Generic error") + + +@patch("os.remove", myosremove) +def test_remove_err_files_fail(tmpdir): + run_location = str(tmpdir) + jobname = "jobname" + err_file = os.path.join(run_location, f"{jobname}.err") + with open(err_file, "w") as fid: + fid.write("Dummy") + + assert os.path.isfile(err_file) + with pytest.raises(IOError): + remove_err_files(run_location, jobname) + assert os.path.isfile(err_file) + + +# testing on windows to account for temp file +def fake_subprocess_open(*args, **kwargs): + kwargs["cmd"] = args[0] + return kwargs + + +@patch("os.name", "nt") +@patch("subprocess.Popen", fake_subprocess_open) +def test_launch_grpc(tmpdir): + cmd = "ansys.exe -b -i my_input.inp -o my_output.inp" + run_location = str(tmpdir) + kwags = launch_grpc(cmd, run_location) + + inp_file = os.path.join(run_location, "my_input.inp") + assert os.path.exists(inp_file) + with open(inp_file, "r") as fid: + assert "FINISH" in fid.read() + + assert cmd == kwags["cmd"] + assert not kwags["shell"] + assert "TRUE" == kwags["env"].pop("ANS_CMD_NODIAG") + assert not kwags["env"] + assert isinstance(kwags["stdin"], type(subprocess.DEVNULL)) + assert isinstance(kwags["stdout"], type(subprocess.PIPE)) + assert isinstance(kwags["stderr"], type(subprocess.PIPE)) diff --git a/tests/test_mapdl.py b/tests/test_mapdl.py index 1de6fe4958..909860f461 100644 --- a/tests/test_mapdl.py +++ b/tests/test_mapdl.py @@ -1835,11 +1835,14 @@ def test_cache_pids(mapdl): if mapdl.version == 23.2: pytest.skip(f"Flaky test in MAPDL 23.2") # I'm not sure why. 
- assert mapdl._pids - mapdl._cache_pids() # Recache pids + if mapdl.launched: + assert mapdl._pids + mapdl._cache_pids() # Recache pids - for each in mapdl._pids: - assert "ansys" in "".join(psutil.Process(each).cmdline()).lower() + for each in mapdl._pids: + assert "ansys" in "".join(psutil.Process(each).cmdline()).lower() + else: + pytest.skip(f"MAPDL needs to have been launched by PyMAPDL.") @requires("local") diff --git a/tests/test_pool.py b/tests/test_pool.py index a42ffc58f4..75c0900338 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -49,7 +49,7 @@ IGNORE_POOL = os.environ.get("IGNORE_POOL", "").upper() == "TRUE" # skipping if ON_STUDENT and ON_LOCAL because we cannot spawn that many instances. -if ON_STUDENT and ON_LOCAL: +if not ON_LOCAL or (ON_STUDENT and ON_LOCAL): pytest.skip(allow_module_level=True) From a0fb521bc743ee82092799d290550fb822cd77da Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Thu, 17 Oct 2024 18:26:04 +0200 Subject: [PATCH 16/33] feat: adding 'pymapdl_nproc' to non-slurm runs (#3487) * feat: adding 'pymapdl_proc' to non-slurm run. Adding tests too. * chore: adding changelog file 3487.miscellaneous.md [dependabot-skip] --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3487.miscellaneous.md | 1 + src/ansys/mapdl/core/launcher.py | 18 +++++++++------ tests/test_launcher.py | 33 +++++++++++++++++++++++++++ 3 files changed, 45 insertions(+), 7 deletions(-) create mode 100644 doc/changelog.d/3487.miscellaneous.md diff --git a/doc/changelog.d/3487.miscellaneous.md b/doc/changelog.d/3487.miscellaneous.md new file mode 100644 index 0000000000..e9b60408a8 --- /dev/null +++ b/doc/changelog.d/3487.miscellaneous.md @@ -0,0 +1 @@ +feat: adding 'pymapdl_nproc' to non-slurm runs \ No newline at end of file diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 2c64723498..83d74453be 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -2330,20 +2330,24 @@ def get_cpus(args: Dict[str, Any]): # Bypassing number of processors checks because VDI/VNC might have # different number of processors than the cluster compute nodes. + # Also the CPUs are set in `get_slurm_options` if args["ON_SLURM"]: return # Setting number of processors machine_cores = psutil.cpu_count(logical=False) + # Some machines only have 1 core + min_cpus = machine_cores if machine_cores < 2 else 2 + if not args["nproc"]: - # Some machines only have 1 core - args["nproc"] = machine_cores if machine_cores < 2 else 2 - else: - if machine_cores < int(args["nproc"]): - raise NotEnoughResources( - f"The machine has {machine_cores} cores. PyMAPDL is asking for {args['nproc']} cores." - ) + # Check the env var `PYMAPDL_NPROC` + args["nproc"] = int(os.environ.get("PYMAPDL_NPROC", min_cpus)) + + if machine_cores < int(args["nproc"]): + raise NotEnoughResources( + f"The machine has {machine_cores} cores. PyMAPDL is asking for {args['nproc']} cores." 
+ ) def remove_err_files(run_location, jobname): diff --git a/tests/test_launcher.py b/tests/test_launcher.py index 2849aed0ab..e7dc8b4843 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -44,6 +44,7 @@ force_smp_in_student, generate_mapdl_launch_command, generate_start_parameters, + get_cpus, get_exec_file, get_run_location, get_slurm_options, @@ -1112,3 +1113,35 @@ def test_launch_grpc(tmpdir): assert isinstance(kwags["stdin"], type(subprocess.DEVNULL)) assert isinstance(kwags["stdout"], type(subprocess.PIPE)) assert isinstance(kwags["stderr"], type(subprocess.PIPE)) + + +@patch("psutil.cpu_count", lambda *args, **kwags: 5) +@pytest.mark.parametrize("arg", [None, 3, 10]) +@pytest.mark.parametrize("env", [None, 3, 10]) +def test_get_cpus(monkeypatch, arg, env): + if env: + monkeypatch.setenv("PYMAPDL_NPROC", env) + + context = NullContext() + cores_machine = psutil.cpu_count(logical=False) # it is patched + + if (arg and arg > cores_machine) or (arg is None and env and env > cores_machine): + context = pytest.raises(NotEnoughResources) + + args = {"nproc": arg, "ON_SLURM": False} + with context: + get_cpus(args) + + if arg: + assert args["nproc"] == arg + elif env: + assert args["nproc"] == env + else: + assert args["nproc"] == 2 + + +@patch("psutil.cpu_count", lambda *args, **kwags: 1) +def test_get_cpus_min(): + args = {"nproc": None, "ON_SLURM": False} + get_cpus(args) + assert args["nproc"] == 1 From 82838e970d99a4352e94bf819724acfdb4b08834 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Thu, 17 Oct 2024 18:32:36 +0200 Subject: [PATCH 17/33] refactor: moving information class to another module (#3491) * refactor: moving information to another module * chore: adding changelog file 3491.added.md [dependabot-skip] --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3491.added.md | 1 + doc/source/api/mapdl.rst | 2 +- src/ansys/mapdl/core/__init__.py | 3 +- src/ansys/mapdl/core/information.py | 456 ++++++++++++++++++++++++++++ src/ansys/mapdl/core/mapdl_core.py | 2 +- src/ansys/mapdl/core/misc.py | 432 -------------------------- tests/test_information.py | 69 +++++ tests/test_misc.py | 43 --- 8 files changed, 530 insertions(+), 478 deletions(-) create mode 100644 doc/changelog.d/3491.added.md create mode 100644 src/ansys/mapdl/core/information.py create mode 100644 tests/test_information.py diff --git a/doc/changelog.d/3491.added.md b/doc/changelog.d/3491.added.md new file mode 100644 index 0000000000..9a165ce053 --- /dev/null +++ b/doc/changelog.d/3491.added.md @@ -0,0 +1 @@ +refactor: moving information class to another module \ No newline at end of file diff --git a/doc/source/api/mapdl.rst b/doc/source/api/mapdl.rst index 97b0fef458..5835b572d6 100644 --- a/doc/source/api/mapdl.rst +++ b/doc/source/api/mapdl.rst @@ -67,7 +67,7 @@ Constants ``Information`` class attributes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. currentmodule:: ansys.mapdl.core.misc +.. currentmodule:: ansys.mapdl.core.information .. 
autosummary:: :toctree: _autosummary diff --git a/src/ansys/mapdl/core/__init__.py b/src/ansys/mapdl/core/__init__.py index 6c50de1e1b..05842784a0 100644 --- a/src/ansys/mapdl/core/__init__.py +++ b/src/ansys/mapdl/core/__init__.py @@ -126,8 +126,9 @@ else: from ansys.mapdl.core.launcher import launch_mapdl +from ansys.mapdl.core.information import Information from ansys.mapdl.core.mapdl_grpc import MapdlGrpc as Mapdl -from ansys.mapdl.core.misc import Information, Report, _check_has_ansys +from ansys.mapdl.core.misc import Report, _check_has_ansys from ansys.mapdl.core.pool import MapdlPool _HAS_ANSYS = _check_has_ansys() diff --git a/src/ansys/mapdl/core/information.py b/src/ansys/mapdl/core/information.py new file mode 100644 index 0000000000..3239b657a7 --- /dev/null +++ b/src/ansys/mapdl/core/information.py @@ -0,0 +1,456 @@ +# Copyright (C) 2016 - 2024 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from functools import wraps +import re +import weakref + +from ansys.mapdl import core as pymapdl + + +def update_information_first(update=False): + """ + Decorator to wrap :class:`Information ` + methods to force update the fields when accessed. + + Parameters + ---------- + update : bool, optional + If ``True``, the class information is updated by calling ``/STATUS`` + before accessing the methods. By default ``False`` + """ + + def decorator(function): + @wraps(function) + def wrapper(self, *args, **kwargs): + if update or not self._stats: + self._update() + return function(self, *args, **kwargs) + + return wrapper + + return decorator + + +class Information: + """ + This class provide some MAPDL information from ``/STATUS`` MAPDL command. + + It is also the object that is called when you issue ``print(mapdl)``, + which means ``print`` calls ``mapdl.info.__str__()``. + + Notes + ----- + You cannot directly modify the values of this class. + + Some of the results are cached for later calls. 
+ + Examples + -------- + >>> mapdl.info + Product: Ansys Mechanical Enterprise + MAPDL Version: 24.1 + ansys.mapdl Version: 0.68.0 + + >>> print(mapdl) + Product: Ansys Mechanical Enterprise + MAPDL Version: 24.1 + ansys.mapdl Version: 0.68.0 + + >>> mapdl.info.product + 'Ansys Mechanical Enterprise' + + >>> info = mapdl.info + >>> info.mapdl_version + 'RELEASE 2021 R2 BUILD 21.2 UPDATE 20210601' + + """ + + def __init__(self, mapdl): + """Class Initializer""" + from ansys.mapdl.core.mapdl import MapdlBase # lazy import to avoid circular + + if not isinstance(mapdl, MapdlBase): # pragma: no cover + raise TypeError("Must be implemented from MAPDL class") + + self._mapdl_weakref = weakref.ref(mapdl) + self._stats = None + self._repr_keys = { + "Product": "product", + "MAPDL Version": "mapdl_version", + "PyMAPDL Version": "pymapdl_version", + } + + @property + def _mapdl(self): + """Return the weakly referenced MAPDL instance.""" + return self._mapdl_weakref() + + def _update(self): + """We might need to do more calls if we implement properties + that change over the MAPDL session.""" + try: + if self._mapdl._exited: # pragma: no cover + raise MapdlExitedError("Information class: MAPDL exited") + + stats = self._mapdl.slashstatus("ALL") + except Exception: # pragma: no cover + self._stats = None + raise MapdlExitedError("Information class: MAPDL exited") + + stats = stats.replace("\n ", "\n") # Bit of formatting + self._stats = stats + self._mapdl._log.debug("Information class: Updated") + + def __repr__(self): + if not self._stats: + self._update() + + return "\n".join( + [ + f"{each_name}:".ljust(25) + f"{getattr(self, each_attr)}".ljust(25) + for each_name, each_attr in self._repr_keys.items() + ] + ) + + @property + @update_information_first(False) + def product(self): + """Retrieve the product from the MAPDL instance.""" + return self._get_product() + + @property + @update_information_first(False) + def mapdl_version(self): + """Retrieve the MAPDL version from the MAPDL instance.""" + return self._get_mapdl_version() + + @property + @update_information_first(False) + def mapdl_version_release(self): + """Retrieve the MAPDL version release from the MAPDL instance.""" + st = self._get_mapdl_version() + return self._get_between("RELEASE", "BUILD", st).strip() + + @property + @update_information_first(False) + def mapdl_version_build(self): + """Retrieve the MAPDL version build from the MAPDL instance.""" + st = self._get_mapdl_version() + return self._get_between("BUILD", "UPDATE", st).strip() + + @property + @update_information_first(False) + def mapdl_version_update(self): + """Retrieve the MAPDL version update from the MAPDL instance.""" + st = self._get_mapdl_version() + return self._get_between("UPDATE", "", st).strip() + + @property + @update_information_first(False) + def pymapdl_version(self): + """Retrieve the PyMAPDL version from the MAPDL instance.""" + return self._get_pymapdl_version() + + @property + @update_information_first(False) + def products(self): + """Retrieve the products from the MAPDL instance.""" + return self._get_products() + + @property + @update_information_first(False) + def preprocessing_capabilities(self): + """Retrieve the preprocessing capabilities from the MAPDL instance.""" + return self._get_preprocessing_capabilities() + + @property + @update_information_first(False) + def aux_capabilities(self): + """Retrieve the aux capabilities from the MAPDL instance.""" + return self._get_aux_capabilities() + + @property + @update_information_first(True) + def 
solution_options(self): + """Retrieve the solution options from the MAPDL instance.""" + return self._get_solution_options() + + @property + @update_information_first(False) + def post_capabilities(self): + """Retrieve the post capabilities from the MAPDL instance.""" + return self._get_post_capabilities() + + @property + @update_information_first(True) + def titles(self): + """Retrieve the titles from the MAPDL instance.""" + return self._get_titles() + + @property + @update_information_first(True) + def title(self): + """Retrieve and set the title from the MAPDL instance.""" + return self._mapdl.inquire("", "title") + + @title.setter + def title(self, title): + return self._mapdl.run(f"/TITLE, {title}") + + @property + @update_information_first(True) + def stitles(self, i=None): + """Retrieve or set the value for the MAPDL stitle (subtitles). + + If 'stitle' includes newline characters (`\\n`), then each line + is assigned to one STITLE. + + If 'stitle' is equals ``None``, the stitles are reset. + + If ``i`` is supplied, only set the stitle number i. + + Starting from 0 up to 3 (Python indexing). + """ + if not i: + return self._get_stitles() + else: + return self._get_stitles()[i] + + @stitles.setter + def stitles(self, stitle, i=None): + if stitle is None: + # Case to empty + stitle = ["", "", "", ""] + + if not isinstance(stitle, (str, list)): + raise ValueError("Only str or list are allowed for stitle") + + if isinstance(stitle, str): + if "\n" in stitle: + stitle = stitle.splitlines() + else: + stitle = "\n".join( + [stitle[ii : ii + 70] for ii in range(0, len(stitle), 70)] + ) + + if any([len(each) > 70 for each in stitle]): + raise ValueError("The number of characters per subtitle is limited to 70.") + + if not i: + for each_index, each_stitle in zip(range(1, 5), stitle): + self._mapdl.stitle(each_index, each_stitle) + else: + self._mapdl.stitle(i, stitle) + + @property + @update_information_first(True) + def units(self): + """Retrieve the units from the MAPDL instance.""" + return self._get_units() + + @property + @update_information_first(True) + def scratch_memory_status(self): + """Retrieve the scratch memory status from the MAPDL instance.""" + return self._get_scratch_memory_status() + + @property + @update_information_first(True) + def database_status(self): + """Retrieve the database status from the MAPDL instance.""" + return self._get_database_status() + + @property + @update_information_first(True) + def config_values(self): + """Retrieve the config values from the MAPDL instance.""" + return self._get_config_values() + + @property + @update_information_first(True) + def global_status(self): + """Retrieve the global status from the MAPDL instance.""" + return self._get_global_status() + + @property + @update_information_first(True) + def job_information(self): + """Retrieve the job information from the MAPDL instance.""" + return self._get_job_information() + + @property + @update_information_first(True) + def model_information(self): + """Retrieve the model information from the MAPDL instance.""" + return self._get_model_information() + + @property + @update_information_first(True) + def boundary_condition_information(self): + """Retrieve the boundary condition information from the MAPDL instance.""" + return self._get_boundary_condition_information() + + @property + @update_information_first(True) + def routine_information(self): + """Retrieve the routine information from the MAPDL instance.""" + return self._get_routine_information() + + @property + 
@update_information_first(True) + def solution_options_configuration(self): + """Retrieve the solution options configuration from the MAPDL instance.""" + return self._get_solution_options_configuration() + + @property + @update_information_first(True) + def load_step_options(self): + """Retrieve the load step options from the MAPDL instance.""" + return self._get_load_step_options() + + def _get_between(self, init_string, end_string=None, string=None): + if not string: + self._update() + string = self._stats + + st = string.find(init_string) + len(init_string) + + if not end_string: + en = None + else: + en = string.find(end_string) + return "\n".join(string[st:en].splitlines()).strip() + + def _get_product(self): + return self._get_products().splitlines()[0] + + def _get_mapdl_version(self): + titles_ = self._get_titles() + st = titles_.find("RELEASE") + en = titles_.find("INITIAL", st) + return titles_[st:en].split("CUSTOMER")[0].strip() + + def _get_pymapdl_version(self): + return pymapdl.__version__ + + def _get_title(self): + match = re.match(r"TITLE=(.*)$", self._get_titles()) + if match: + return match.groups(1)[0].strip() + + def _get_stitles(self): + return [ + ( + re.search(f"SUBTITLE {i}=(.*)", self._get_titles()) + .groups(1)[0] + .strip() + if re.search(f"SUBTITLE {i}=(.*)", self._get_titles()) + else "" + ) + for i in range(1, 5) + ] + + def _get_products(self): + init_ = "*** Products ***" + end_string = "*** PreProcessing Capabilities ***" + return self._get_between(init_, end_string) + + def _get_preprocessing_capabilities(self): + init_ = "*** PreProcessing Capabilities ***" + end_string = "*** Aux Capabilities ***" + return self._get_between(init_, end_string) + + def _get_aux_capabilities(self): + init_ = "*** Aux Capabilities ***" + end_string = "*** Solution Options ***" + return self._get_between(init_, end_string) + + def _get_solution_options(self): + init_ = "*** Solution Options ***" + end_string = "*** Post Capabilities ***" + return self._get_between(init_, end_string) + + def _get_post_capabilities(self): + init_ = "*** Post Capabilities ***" + end_string = "***** TITLES *****" + return self._get_between(init_, end_string) + + def _get_titles(self): + init_ = "***** TITLES *****" + end_string = "***** UNITS *****" + return self._get_between(init_, end_string) + + def _get_units(self): + init_ = "***** UNITS *****" + end_string = "***** SCRATCH MEMORY STATUS *****" + return self._get_between(init_, end_string) + + def _get_scratch_memory_status(self): + init_ = "***** SCRATCH MEMORY STATUS *****" + end_string = "***** DATABASE STATUS *****" + return self._get_between(init_, end_string) + + def _get_database_status(self): + init_ = "***** DATABASE STATUS *****" + end_string = "***** CONFIG VALUES *****" + return self._get_between(init_, end_string) + + def _get_config_values(self): + init_ = "***** CONFIG VALUES *****" + end_string = "G L O B A L S T A T U S" + return self._get_between(init_, end_string) + + def _get_global_status(self): + init_ = "G L O B A L S T A T U S" + end_string = "J O B I N F O R M A T I O N" + return self._get_between(init_, end_string) + + def _get_job_information(self): + init_ = "J O B I N F O R M A T I O N" + end_string = "M O D E L I N F O R M A T I O N" + return self._get_between(init_, end_string) + + def _get_model_information(self): + init_ = "M O D E L I N F O R M A T I O N" + end_string = "B O U N D A R Y C O N D I T I O N I N F O R M A T I O N" + return self._get_between(init_, end_string) + + def 
_get_boundary_condition_information(self): + init_ = "B O U N D A R Y C O N D I T I O N I N F O R M A T I O N" + end_string = "R O U T I N E I N F O R M A T I O N" + return self._get_between(init_, end_string) + + def _get_routine_information(self): + init_ = "R O U T I N E I N F O R M A T I O N" + end_string = None + return self._get_between(init_, end_string) + + def _get_solution_options_configuration(self): + init_ = "S O L U T I O N O P T I O N S" + end_string = "L O A D S T E P O P T I O N S" + return self._get_between(init_, end_string) + + def _get_load_step_options(self): + init_ = "L O A D S T E P O P T I O N S" + end_string = None + return self._get_between(init_, end_string) diff --git a/src/ansys/mapdl/core/mapdl_core.py b/src/ansys/mapdl/core/mapdl_core.py index 2bfffcad69..8fec578281 100644 --- a/src/ansys/mapdl/core/mapdl_core.py +++ b/src/ansys/mapdl/core/mapdl_core.py @@ -61,10 +61,10 @@ MapdlInvalidRoutineError, MapdlRuntimeError, ) +from ansys.mapdl.core.information import Information from ansys.mapdl.core.inline_functions import Query from ansys.mapdl.core.mapdl_types import MapdlFloat from ansys.mapdl.core.misc import ( - Information, check_valid_routine, last_created, random_string, diff --git a/src/ansys/mapdl/core/misc.py b/src/ansys/mapdl/core/misc.py index ffbd46ac1c..28609486ea 100644 --- a/src/ansys/mapdl/core/misc.py +++ b/src/ansys/mapdl/core/misc.py @@ -29,14 +29,12 @@ from pathlib import Path import platform import random -import re import socket import string import tempfile from threading import Thread from typing import Union from warnings import warn -import weakref try: from ansys.tools.path import get_available_ansys_installations @@ -49,7 +47,6 @@ from ansys.mapdl import core as pymapdl from ansys.mapdl.core import _HAS_PYVISTA, LOG -from ansys.mapdl.core.errors import MapdlExitedError try: import ansys.tools.report as pyansys_report @@ -660,435 +657,6 @@ def check_valid_port(port, lower_bound=1000, high_bound=60000): ) -def update_information_first(update=False): - """ - Decorator to wrap :class:`Information ` - methods to force update the fields when accessed. - - Parameters - ---------- - update : bool, optional - If ``True``, the class information is updated by calling ``/STATUS`` - before accessing the methods. By default ``False`` - """ - - def decorator(function): - @wraps(function) - def wrapper(self, *args, **kwargs): - if update or not self._stats: - self._update() - return function(self, *args, **kwargs) - - return wrapper - - return decorator - - -class Information: - """ - This class provide some MAPDL information from ``/STATUS`` MAPDL command. - - It is also the object that is called when you issue ``print(mapdl)``, - which means ``print`` calls ``mapdl.info.__str__()``. - - Notes - ----- - You cannot directly modify the values of this class. - - Some of the results are cached for later calls. 
- - Examples - -------- - >>> mapdl.info - Product: Ansys Mechanical Enterprise - MAPDL Version: 24.1 - ansys.mapdl Version: 0.68.0 - - >>> print(mapdl) - Product: Ansys Mechanical Enterprise - MAPDL Version: 24.1 - ansys.mapdl Version: 0.68.0 - - >>> mapdl.info.product - 'Ansys Mechanical Enterprise' - - >>> info = mapdl.info - >>> info.mapdl_version - 'RELEASE 2021 R2 BUILD 21.2 UPDATE 20210601' - - """ - - def __init__(self, mapdl): - """Class Initializer""" - from ansys.mapdl.core.mapdl import MapdlBase # lazy import to avoid circular - - if not isinstance(mapdl, MapdlBase): # pragma: no cover - raise TypeError("Must be implemented from MAPDL class") - - self._mapdl_weakref = weakref.ref(mapdl) - self._stats = None - self._repr_keys = { - "Product": "product", - "MAPDL Version": "mapdl_version", - "PyMAPDL Version": "pymapdl_version", - } - - @property - def _mapdl(self): - """Return the weakly referenced MAPDL instance.""" - return self._mapdl_weakref() - - def _update(self): - """We might need to do more calls if we implement properties - that change over the MAPDL session.""" - try: - if self._mapdl._exited: # pragma: no cover - raise MapdlExitedError("Information class: MAPDL exited") - - stats = self._mapdl.slashstatus("ALL") - except Exception: # pragma: no cover - self._stats = None - raise MapdlExitedError("Information class: MAPDL exited") - - stats = stats.replace("\n ", "\n") # Bit of formatting - self._stats = stats - self._mapdl._log.debug("Information class: Updated") - - def __repr__(self): - if not self._stats: - self._update() - - return "\n".join( - [ - f"{each_name}:".ljust(25) + f"{getattr(self, each_attr)}".ljust(25) - for each_name, each_attr in self._repr_keys.items() - ] - ) - - @property - @update_information_first(False) - def product(self): - """Retrieve the product from the MAPDL instance.""" - return self._get_product() - - @property - @update_information_first(False) - def mapdl_version(self): - """Retrieve the MAPDL version from the MAPDL instance.""" - return self._get_mapdl_version() - - @property - @update_information_first(False) - def mapdl_version_release(self): - """Retrieve the MAPDL version release from the MAPDL instance.""" - st = self._get_mapdl_version() - return self._get_between("RELEASE", "BUILD", st).strip() - - @property - @update_information_first(False) - def mapdl_version_build(self): - """Retrieve the MAPDL version build from the MAPDL instance.""" - st = self._get_mapdl_version() - return self._get_between("BUILD", "UPDATE", st).strip() - - @property - @update_information_first(False) - def mapdl_version_update(self): - """Retrieve the MAPDL version update from the MAPDL instance.""" - st = self._get_mapdl_version() - return self._get_between("UPDATE", "", st).strip() - - @property - @update_information_first(False) - def pymapdl_version(self): - """Retrieve the PyMAPDL version from the MAPDL instance.""" - return self._get_pymapdl_version() - - @property - @update_information_first(False) - def products(self): - """Retrieve the products from the MAPDL instance.""" - return self._get_products() - - @property - @update_information_first(False) - def preprocessing_capabilities(self): - """Retrieve the preprocessing capabilities from the MAPDL instance.""" - return self._get_preprocessing_capabilities() - - @property - @update_information_first(False) - def aux_capabilities(self): - """Retrieve the aux capabilities from the MAPDL instance.""" - return self._get_aux_capabilities() - - @property - @update_information_first(True) - def 
solution_options(self): - """Retrieve the solution options from the MAPDL instance.""" - return self._get_solution_options() - - @property - @update_information_first(False) - def post_capabilities(self): - """Retrieve the post capabilities from the MAPDL instance.""" - return self._get_post_capabilities() - - @property - @update_information_first(True) - def titles(self): - """Retrieve the titles from the MAPDL instance.""" - return self._get_titles() - - @property - @update_information_first(True) - def title(self): - """Retrieve and set the title from the MAPDL instance.""" - return self._mapdl.inquire("", "title") - - @title.setter - def title(self, title): - return self._mapdl.run(f"/TITLE, {title}") - - @property - @update_information_first(True) - def stitles(self, i=None): - """Retrieve or set the value for the MAPDL stitle (subtitles). - - If 'stitle' includes newline characters (`\\n`), then each line - is assigned to one STITLE. - - If 'stitle' is equals ``None``, the stitles are reset. - - If ``i`` is supplied, only set the stitle number i. - - Starting from 0 up to 3 (Python indexing). - """ - if not i: - return self._get_stitles() - else: - return self._get_stitles()[i] - - @stitles.setter - def stitles(self, stitle, i=None): - if stitle is None: - # Case to empty - stitle = ["", "", "", ""] - - if not isinstance(stitle, (str, list)): - raise ValueError("Only str or list are allowed for stitle") - - if isinstance(stitle, str): - if "\n" in stitle: - stitle = stitle.splitlines() - else: - stitle = "\n".join( - [stitle[ii : ii + 70] for ii in range(0, len(stitle), 70)] - ) - - if any([len(each) > 70 for each in stitle]): - raise ValueError("The number of characters per subtitle is limited to 70.") - - if not i: - for each_index, each_stitle in zip(range(1, 5), stitle): - self._mapdl.stitle(each_index, each_stitle) - else: - self._mapdl.stitle(i, stitle) - - @property - @update_information_first(True) - def units(self): - """Retrieve the units from the MAPDL instance.""" - return self._get_units() - - @property - @update_information_first(True) - def scratch_memory_status(self): - """Retrieve the scratch memory status from the MAPDL instance.""" - return self._get_scratch_memory_status() - - @property - @update_information_first(True) - def database_status(self): - """Retrieve the database status from the MAPDL instance.""" - return self._get_database_status() - - @property - @update_information_first(True) - def config_values(self): - """Retrieve the config values from the MAPDL instance.""" - return self._get_config_values() - - @property - @update_information_first(True) - def global_status(self): - """Retrieve the global status from the MAPDL instance.""" - return self._get_global_status() - - @property - @update_information_first(True) - def job_information(self): - """Retrieve the job information from the MAPDL instance.""" - return self._get_job_information() - - @property - @update_information_first(True) - def model_information(self): - """Retrieve the model information from the MAPDL instance.""" - return self._get_model_information() - - @property - @update_information_first(True) - def boundary_condition_information(self): - """Retrieve the boundary condition information from the MAPDL instance.""" - return self._get_boundary_condition_information() - - @property - @update_information_first(True) - def routine_information(self): - """Retrieve the routine information from the MAPDL instance.""" - return self._get_routine_information() - - @property - 
@update_information_first(True) - def solution_options_configuration(self): - """Retrieve the solution options configuration from the MAPDL instance.""" - return self._get_solution_options_configuration() - - @property - @update_information_first(True) - def load_step_options(self): - """Retrieve the load step options from the MAPDL instance.""" - return self._get_load_step_options() - - def _get_between(self, init_string, end_string=None, string=None): - if not string: - self._update() - string = self._stats - - st = string.find(init_string) + len(init_string) - - if not end_string: - en = None - else: - en = string.find(end_string) - return "\n".join(string[st:en].splitlines()).strip() - - def _get_product(self): - return self._get_products().splitlines()[0] - - def _get_mapdl_version(self): - titles_ = self._get_titles() - st = titles_.find("RELEASE") - en = titles_.find("INITIAL", st) - return titles_[st:en].split("CUSTOMER")[0].strip() - - def _get_pymapdl_version(self): - return pymapdl.__version__ - - def _get_title(self): - match = re.match(r"TITLE=(.*)$", self._get_titles()) - if match: - return match.groups(1)[0].strip() - - def _get_stitles(self): - return [ - ( - re.search(f"SUBTITLE {i}=(.*)", self._get_titles()) - .groups(1)[0] - .strip() - if re.search(f"SUBTITLE {i}=(.*)", self._get_titles()) - else "" - ) - for i in range(1, 5) - ] - - def _get_products(self): - init_ = "*** Products ***" - end_string = "*** PreProcessing Capabilities ***" - return self._get_between(init_, end_string) - - def _get_preprocessing_capabilities(self): - init_ = "*** PreProcessing Capabilities ***" - end_string = "*** Aux Capabilities ***" - return self._get_between(init_, end_string) - - def _get_aux_capabilities(self): - init_ = "*** Aux Capabilities ***" - end_string = "*** Solution Options ***" - return self._get_between(init_, end_string) - - def _get_solution_options(self): - init_ = "*** Solution Options ***" - end_string = "*** Post Capabilities ***" - return self._get_between(init_, end_string) - - def _get_post_capabilities(self): - init_ = "*** Post Capabilities ***" - end_string = "***** TITLES *****" - return self._get_between(init_, end_string) - - def _get_titles(self): - init_ = "***** TITLES *****" - end_string = "***** UNITS *****" - return self._get_between(init_, end_string) - - def _get_units(self): - init_ = "***** UNITS *****" - end_string = "***** SCRATCH MEMORY STATUS *****" - return self._get_between(init_, end_string) - - def _get_scratch_memory_status(self): - init_ = "***** SCRATCH MEMORY STATUS *****" - end_string = "***** DATABASE STATUS *****" - return self._get_between(init_, end_string) - - def _get_database_status(self): - init_ = "***** DATABASE STATUS *****" - end_string = "***** CONFIG VALUES *****" - return self._get_between(init_, end_string) - - def _get_config_values(self): - init_ = "***** CONFIG VALUES *****" - end_string = "G L O B A L S T A T U S" - return self._get_between(init_, end_string) - - def _get_global_status(self): - init_ = "G L O B A L S T A T U S" - end_string = "J O B I N F O R M A T I O N" - return self._get_between(init_, end_string) - - def _get_job_information(self): - init_ = "J O B I N F O R M A T I O N" - end_string = "M O D E L I N F O R M A T I O N" - return self._get_between(init_, end_string) - - def _get_model_information(self): - init_ = "M O D E L I N F O R M A T I O N" - end_string = "B O U N D A R Y C O N D I T I O N I N F O R M A T I O N" - return self._get_between(init_, end_string) - - def 
_get_boundary_condition_information(self): - init_ = "B O U N D A R Y C O N D I T I O N I N F O R M A T I O N" - end_string = "R O U T I N E I N F O R M A T I O N" - return self._get_between(init_, end_string) - - def _get_routine_information(self): - init_ = "R O U T I N E I N F O R M A T I O N" - end_string = None - return self._get_between(init_, end_string) - - def _get_solution_options_configuration(self): - init_ = "S O L U T I O N O P T I O N S" - end_string = "L O A D S T E P O P T I O N S" - return self._get_between(init_, end_string) - - def _get_load_step_options(self): - init_ = "L O A D S T E P O P T I O N S" - end_string = None - return self._get_between(init_, end_string) - - def write_array(filename: Union[str, bytes], array: np.ndarray): """ Write an array to a file. diff --git a/tests/test_information.py b/tests/test_information.py new file mode 100644 index 0000000000..25ca76d4d5 --- /dev/null +++ b/tests/test_information.py @@ -0,0 +1,69 @@ +# Copyright (C) 2016 - 2024 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Test the information module""" + +import inspect + +import pytest + + +def test_mapdl_info(mapdl, capfd): + info = mapdl.info + for attr, value in inspect.getmembers(info): + if not attr.startswith("_") and attr not in ["title", "stitles"]: + assert isinstance(value, str) + + with pytest.raises(AttributeError): + setattr(info, attr, "any_value") + + assert "PyMAPDL" in mapdl.info.__repr__() + out = info.__str__() + + assert "ansys" in out.lower() + assert "Product" in out + assert "MAPDL Version" in out + assert "UPDATE" in out + + +def test_info_title(mapdl): + title = "this is my title" + mapdl.info.title = title + assert title == mapdl.info.title + + +def test_info_stitle(mapdl): + info = mapdl.info + + assert all([not each for each in info.stitles]) + stitles = ["asfd", "qwer", "zxcv", "jkl"] + info.stitles = "\n".join(stitles) + + assert stitles == info.stitles + + stitles = stitles[::-1] + + info.stitles = stitles + assert stitles == info.stitles + + info.stitles = None + assert all([not each for each in info.stitles]) diff --git a/tests/test_misc.py b/tests/test_misc.py index b556dcafba..d9fc88681a 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -21,7 +21,6 @@ # SOFTWARE. 
"""Small or misc tests that don't fit in other test modules""" -import inspect import os import pathlib @@ -147,48 +146,6 @@ def fun( assert np.allclose(last_keypoint, np.array([1, 1, 1, 1])) -def test_mapdl_info(mapdl, capfd): - info = mapdl.info - for attr, value in inspect.getmembers(info): - if not attr.startswith("_") and attr not in ["title", "stitles"]: - assert isinstance(value, str) - - with pytest.raises(AttributeError): - setattr(info, attr, "any_value") - - assert "PyMAPDL" in mapdl.info.__repr__() - out = info.__str__() - - assert "ansys" in out.lower() - assert "Product" in out - assert "MAPDL Version" in out - assert "UPDATE" in out - - -def test_info_title(mapdl): - title = "this is my title" - mapdl.info.title = title - assert title == mapdl.info.title - - -def test_info_stitle(mapdl): - info = mapdl.info - - assert all([not each for each in info.stitles]) - stitles = ["asfd", "qwer", "zxcv", "jkl"] - info.stitles = "\n".join(stitles) - - assert stitles == info.stitles - - stitles = stitles[::-1] - - info.stitles = stitles - assert stitles == info.stitles - - info.stitles = None - assert all([not each for each in info.stitles]) - - @pytest.mark.parametrize("file_", ["dummy.dumdum", "dumdum.dummy"]) def test_load_file_local(mapdl, tmpdir, file_): """Checking 'load_file' function. From 84085c345c47ed6c67cad05c1bba11fe4d0df30c Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Thu, 17 Oct 2024 18:40:19 +0200 Subject: [PATCH 18/33] ci: avoiding linkcheck on changelog page (#3488) * ci: avoiding linkcheck on changelog page * chore: adding changelog file 3488.documentation.md [dependabot-skip] --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3488.documentation.md | 1 + doc/source/conf.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3488.documentation.md diff --git a/doc/changelog.d/3488.documentation.md b/doc/changelog.d/3488.documentation.md new file mode 100644 index 0000000000..ed1b85597f --- /dev/null +++ b/doc/changelog.d/3488.documentation.md @@ -0,0 +1 @@ +ci: avoiding linkcheck on changelog page \ No newline at end of file diff --git a/doc/source/conf.py b/doc/source/conf.py index d4e73a7a0b..235c3db45b 100755 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -214,7 +214,7 @@ rst_epilog += f.read() # Broken anchors: -linkcheck_exclude_documents = ["index"] +linkcheck_exclude_documents = ["index", "changelog"] linkcheck_anchors_ignore_for_url = ["https://docs.pyvista.org/api/*"] linkcheck_ignore = [ "https://github.com/ansys/pymapdl/*", From 4bb5192f9426d16b9e608e395a50a419789705e4 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Thu, 17 Oct 2024 19:25:03 +0200 Subject: [PATCH 19/33] refactor: `__init__` file (#3490) * refactor: cleaning up the init file * feat: centralizing globals in `__init__` file * chore: adding changelog file 3490.added.md [dependabot-skip] * feat: adding missing import --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3490.added.md | 1 + src/ansys/mapdl/core/__init__.py | 169 +++++++++++++---------------- src/ansys/mapdl/core/helpers.py | 102 +++++++++++++++++ src/ansys/mapdl/core/launcher.py | 23 ++-- src/ansys/mapdl/core/licensing.py | 10 +- src/ansys/mapdl/core/mapdl_grpc.py | 12 +- src/ansys/mapdl/core/misc.py | 18 +-- src/ansys/mapdl/core/pool.py | 9 +- tests/conftest.py | 20 +--- 9 files changed, 
201 insertions(+), 163 deletions(-) create mode 100644 doc/changelog.d/3490.added.md create mode 100644 src/ansys/mapdl/core/helpers.py diff --git a/doc/changelog.d/3490.added.md b/doc/changelog.d/3490.added.md new file mode 100644 index 0000000000..93351e68ee --- /dev/null +++ b/doc/changelog.d/3490.added.md @@ -0,0 +1 @@ +refactor: `__init__` file \ No newline at end of file diff --git a/src/ansys/mapdl/core/__init__.py b/src/ansys/mapdl/core/__init__.py index 05842784a0..dd8981461b 100644 --- a/src/ansys/mapdl/core/__init__.py +++ b/src/ansys/mapdl/core/__init__.py @@ -20,102 +20,89 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -# Importing logging +import importlib.metadata as importlib_metadata + +############################################################################### +# Imports +# ======= +# import logging import os import sys +from typing import Dict, List, Tuple from warnings import warn -import platformdirs - -# Setup data directory -USER_DATA_PATH = platformdirs.user_data_dir( - appname="ansys_mapdl_core", appauthor="Ansys" -) -if not os.path.exists(USER_DATA_PATH): # pragma: no cover - os.makedirs(USER_DATA_PATH) - -DEPRECATING_MINIMUM_PYTHON_VERSION = True -MINIMUM_PYTHON_VERSION = (3, 10) - -first_time_file = os.path.join(USER_DATA_PATH, ".firstime") -if not os.path.exists(first_time_file): # pragma: no cover - py_ver = f"{sys.version_info[0]}.{sys.version_info[1]}" - py_ver_min = f"{MINIMUM_PYTHON_VERSION[0]}.{MINIMUM_PYTHON_VERSION[1]}" - - if ( - sys.version_info[1] == MINIMUM_PYTHON_VERSION[1] - and DEPRECATING_MINIMUM_PYTHON_VERSION - ): - warn( - f"Support for Python {py_ver} will be dropped in the next minor " "release." - ) - - if sys.version_info[1] <= MINIMUM_PYTHON_VERSION[1]: - warn( - f"Python {py_ver} is not being tested or officially supported. " - "It is recommended you use a newer version of Python. 
" - f"The mininimum supported and tested version is {py_ver_min}.\n\n" - "**This warning is shown only the first time you run PyMAPDL.**\n" - ) - - with open(first_time_file, "w") as fid: - fid.write("") - -EXAMPLES_PATH = os.path.join(USER_DATA_PATH, "examples") +from platformdirs import user_data_dir +############################################################################### +# Logging +# ======= +# from ansys.mapdl.core.logging import Logger LOG = Logger(level=logging.ERROR, to_file=False, to_stdout=True) LOG.debug("Loaded logging module as LOG") +############################################################################### +# Globals +# ======= +# +from ansys.mapdl.core.helpers import is_installed, run_every_import, run_first_time -BUILDING_GALLERY = False -RUNNING_TESTS = False - -if RUNNING_TESTS: # pragma: no cover - LOG.debug("Running tests on Pytest") - -_LOCAL_PORTS = [] - +__version__: str = importlib_metadata.version(__name__.replace(".", "-")) -try: - from ansys.tools.visualization_interface import Plotter +# A dictionary relating PyMAPDL server versions with the unified install ones +VERSION_MAP: Dict[Tuple[int, int, int], str] = { + (0, 0, 0): "2020R2", + (0, 3, 0): "2021R1", + (0, 4, 0): "2021R2", + (0, 4, 1): "2021R2", + (0, 5, 0): "2022R1", + (0, 5, 1): "2022R2", +} - _HAS_VISUALIZER = True -except ModuleNotFoundError: # pragma: no cover - LOG.debug("The module 'ansys-tools-visualization_interface' is not installed.") - _HAS_VISUALIZER = False +BUILDING_GALLERY: bool = False +RUNNING_TESTS: bool = False -try: - import pyvista as pv +DEPRECATING_MINIMUM_PYTHON_VERSION: bool = True +MINIMUM_PYTHON_VERSION: Tuple[int, int] = (3, 10) - _HAS_PYVISTA = True -except ModuleNotFoundError: # pragma: no cover - LOG.debug("The module 'pyvista' is not installed.") - _HAS_PYVISTA = False +# Import related globals +_HAS_ATP: bool = is_installed("ansys.tools.path") +_HAS_PIM: bool = is_installed("ansys.platform.instancemanagement") +_HAS_PYANSYS_REPORT: bool = is_installed("ansys.tools.report") +_HAS_PYVISTA: bool = is_installed("pyvista") +_HAS_TQDM: bool = is_installed("tqdm") +_HAS_VISUALIZER: bool = is_installed("ansys.tools.visualization_interface") +# Setup directories +USER_DATA_PATH: str = user_data_dir(appname="ansys_mapdl_core", appauthor="Ansys") +EXAMPLES_PATH = os.path.join(USER_DATA_PATH, "examples") -try: - import importlib.metadata as importlib_metadata -except ModuleNotFoundError: # pragma: no cover - import importlib_metadata +# Store local ports +_LOCAL_PORTS: List[int] = [] -__version__ = importlib_metadata.version(__name__.replace(".", "-")) +############################################################################### +# First time +# ========== +# +# This function runs only the first time PyMAPDL is importad after it is installed. +# It creates the required directories and raise Python version related warnings. +# +run_first_time() -try: - from ansys.tools.path.path import ( - change_default_ansys_path, - find_ansys, - get_ansys_path, - get_available_ansys_installations, - save_ansys_path, - ) -except: - # We don't really use these imports in the library. They are here for - # convenience. - pass +############################################################################### +# Runs every time +# =============== +# +# This function runs every time that PyMAPDL is imported. 
+# +run_every_import() +############################################################################### +# Library imports +# =============== +# from ansys.mapdl.core._version import SUPPORTED_ANSYS_VERSIONS from ansys.mapdl.core.convert import convert_apdl_block, convert_script from ansys.mapdl.core.launcher import close_all_local_instances @@ -131,23 +118,19 @@ from ansys.mapdl.core.misc import Report, _check_has_ansys from ansys.mapdl.core.pool import MapdlPool -_HAS_ANSYS = _check_has_ansys() +############################################################################### +# Convenient imports +# ================== +# +# For compatibility with other versions or for convenience +if _HAS_ATP: + from ansys.tools.path.path import ( + change_default_ansys_path, + find_ansys, + get_ansys_path, + get_available_ansys_installations, + save_ansys_path, + ) if _HAS_VISUALIZER: - from ansys.mapdl.core.plotting.theme import _apply_default_theme - - _apply_default_theme() - -BUILDING_GALLERY = False -RUNNING_TESTS = False - - -VERSION_MAP = { - (0, 0, 0): "2020R2", - (0, 3, 0): "2021R1", - (0, 4, 0): "2021R2", - (0, 4, 1): "2021R2", - (0, 5, 0): "2022R1", - (0, 5, 1): "2022R2", # as of 21 Mar 2022 unreleased -} -"""A dictionary relating PyMAPDL server versions with the unified install ones.""" + from ansys.tools.visualization_interface import Plotter diff --git a/src/ansys/mapdl/core/helpers.py b/src/ansys/mapdl/core/helpers.py new file mode 100644 index 0000000000..edb29704ab --- /dev/null +++ b/src/ansys/mapdl/core/helpers.py @@ -0,0 +1,102 @@ +# Copyright (C) 2016 - 2024 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +"""Module for helper functions""" + +import importlib +import os +import sys +from warnings import warn + +from ansys.mapdl.core import LOG + + +def is_installed(package_name: str) -> bool: + """Check if a package is installed""" + + if os.name == "nt": + package_name = package_name.replace("-", ".") + + try: + importlib.import_module(package_name) + + return True + except ModuleNotFoundError: # pragma: no cover + LOG.debug(f"The module '{package_name}' is not installed.") + return False + + +def run_first_time() -> None: + """Run this function the first time PyMAPDL is imported""" + from ansys.mapdl.core import ( + DEPRECATING_MINIMUM_PYTHON_VERSION, + MINIMUM_PYTHON_VERSION, + USER_DATA_PATH, + ) + + first_time_file: str = os.path.join(USER_DATA_PATH, ".firstime") + + # Run the first time only + if not os.path.exists(first_time_file): # pragma: no cover + + # Create USER_DATA_PATH directory + if not os.path.exists(USER_DATA_PATH): # pragma: no cover + os.makedirs(USER_DATA_PATH) + + # Show warning about Python compatibility + py_ver = f"{sys.version_info[0]}.{sys.version_info[1]}" + py_ver_min = f"{MINIMUM_PYTHON_VERSION[0]}.{MINIMUM_PYTHON_VERSION[1]}" + + if ( + sys.version_info[1] == MINIMUM_PYTHON_VERSION[1] + and DEPRECATING_MINIMUM_PYTHON_VERSION + ): + warn( + f"Support for Python {py_ver} will be dropped in the next minor " + "release." + ) + + if sys.version_info[1] <= MINIMUM_PYTHON_VERSION[1]: + warn( + f"Python {py_ver} is not being tested or officially supported. " + "It is recommended you use a newer version of Python. " + f"The mininimum supported and tested version is {py_ver_min}.\n\n" + "**This warning is shown only the first time you run PyMAPDL.**\n" + ) + + with open(first_time_file, "w") as fid: + fid.write("") + + +def run_every_import() -> None: + # Run every time we import PyMAPDL + from ansys.mapdl.core import _HAS_VISUALIZER, RUNNING_TESTS + + # Apply custom theme + if _HAS_VISUALIZER: + from ansys.mapdl.core.plotting.theme import _apply_default_theme + + _apply_default_theme() + + # In case we want to do something specific for testing. 
+ if RUNNING_TESTS: # pragma: no cover + LOG.debug("Running tests on Pytest") diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 83d74453be..047404f64c 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -36,23 +36,8 @@ import psutil -try: - import ansys.platform.instancemanagement as pypim - - _HAS_PIM = True - -except ModuleNotFoundError: # pragma: no cover - _HAS_PIM = False - -try: - from ansys.tools.path import find_ansys, get_ansys_path, version_from_path - - _HAS_ATP = True -except ModuleNotFoundError: - _HAS_ATP = False - from ansys.mapdl import core as pymapdl -from ansys.mapdl.core import LOG +from ansys.mapdl.core import _HAS_ATP, _HAS_PIM, LOG from ansys.mapdl.core._version import SUPPORTED_ANSYS_VERSIONS from ansys.mapdl.core.errors import ( LockFileException, @@ -72,6 +57,12 @@ threaded, ) +if _HAS_PIM: + import ansys.platform.instancemanagement as pypim + +if _HAS_ATP: + from ansys.tools.path import find_ansys, get_ansys_path, version_from_path + if TYPE_CHECKING: # pragma: no cover from ansys.mapdl.core.mapdl_console import MapdlConsole diff --git a/src/ansys/mapdl/core/licensing.py b/src/ansys/mapdl/core/licensing.py index 8740084ca9..28c68bea3b 100644 --- a/src/ansys/mapdl/core/licensing.py +++ b/src/ansys/mapdl/core/licensing.py @@ -27,19 +27,13 @@ import subprocess import time -from ansys.mapdl.core import LOG +from ansys.mapdl.core import _HAS_ATP, LOG from ansys.mapdl.core.errors import LicenseServerConnectionError from ansys.mapdl.core.misc import threaded_daemon -try: +if _HAS_ATP: from ansys.tools.path import get_ansys_path, version_from_path - _HAS_ATP = True - -except ModuleNotFoundError: - _HAS_ATP = False - - LOCALHOST = "127.0.0.1" LIC_PATH_ENVAR = "ANSYSLIC_DIR" LIC_FILE_ENVAR = "ANSYSLMD_LICENSE_FILE" diff --git a/src/ansys/mapdl/core/mapdl_grpc.py b/src/ansys/mapdl/core/mapdl_grpc.py index 72f4b9e054..28210b8f40 100644 --- a/src/ansys/mapdl/core/mapdl_grpc.py +++ b/src/ansys/mapdl/core/mapdl_grpc.py @@ -64,7 +64,7 @@ except ImportError: # pragma: no cover raise ImportError(MSG_IMPORT) -from ansys.mapdl.core import _LOCAL_PORTS, __version__ +from ansys.mapdl.core import _HAS_TQDM, __version__ from ansys.mapdl.core.common_grpc import ( ANSYS_VALUE_TYPE, DEFAULT_CHUNKSIZE, @@ -92,13 +92,9 @@ # Checking if tqdm is installed. # If it is, the default value for progress_bar is true. -try: +if _HAS_TQDM: from tqdm import tqdm - _HAS_TQDM = True -except ModuleNotFoundError: # pragma: no cover - _HAS_TQDM = False - if TYPE_CHECKING: # pragma: no cover from queue import Queue @@ -1125,8 +1121,8 @@ def exit(self, save=False, force=False, **kwargs): self._remove_temp_dir_on_exit(mapdl_path) - if self._local and self._port in _LOCAL_PORTS: - _LOCAL_PORTS.remove(self._port) + if self._local and self._port in pymapdl._LOCAL_PORTS: + pymapdl._LOCAL_PORTS.remove(self._port) def _remove_temp_dir_on_exit(self, path=None): """Removes the temporary directory created by the launcher. 
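The `__init__` refactor in this patch centralizes module-level globals and has callers reach them through the package namespace (for example `pymapdl._LOCAL_PORTS` in `mapdl_grpc.py` above) instead of importing the names directly. A minimal standalone sketch of the Python semantics behind that choice, using a stand-in module (`core_demo`) rather than the real `ansys.mapdl.core` package:

    import types

    # Stand-in for a package ``__init__`` that owns a mutable global.
    core = types.ModuleType("core_demo")
    core._LOCAL_PORTS = [50052]

    # What ``from core import _LOCAL_PORTS`` would bind: a direct reference.
    ports_by_name = core._LOCAL_PORTS

    def release_port(port: int) -> None:
        # Resolve the attribute on the module each time, as the patch does
        # with ``pymapdl._LOCAL_PORTS``.
        if port in core._LOCAL_PORTS:
            core._LOCAL_PORTS.remove(port)

    release_port(50052)
    print(ports_by_name)         # [] -- in-place mutation is visible to both bindings

    core._LOCAL_PORTS = [50060]  # rebinding the global ...
    print(ports_by_name)         # [] -- ... is not seen by the name bound earlier
    print(core._LOCAL_PORTS)     # [50060] -- attribute access sees the current list

In-place mutation is visible either way because both names point at the same list object; going through the module attribute additionally stays correct if the global is ever rebound, which is one reason keeping the globals in one place and qualifying access through the package is the more robust pattern.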
diff --git a/src/ansys/mapdl/core/misc.py b/src/ansys/mapdl/core/misc.py index 28609486ea..efbc53295a 100644 --- a/src/ansys/mapdl/core/misc.py +++ b/src/ansys/mapdl/core/misc.py @@ -36,25 +36,17 @@ from typing import Union from warnings import warn -try: - from ansys.tools.path import get_available_ansys_installations - - _HAS_ATP = True -except ModuleNotFoundError: - _HAS_ATP = False - import numpy as np from ansys.mapdl import core as pymapdl -from ansys.mapdl.core import _HAS_PYVISTA, LOG +from ansys.mapdl.core import _HAS_ATP, _HAS_PYANSYS_REPORT, _HAS_PYVISTA, LOG -try: +if _HAS_ATP: + from ansys.tools.path import get_available_ansys_installations + +if _HAS_PYANSYS_REPORT: import ansys.tools.report as pyansys_report - _HAS_PYANSYS_REPORT = True -except ModuleNotFoundError: # pragma: no cover - LOG.debug("The package 'pyansys-tools-report' is not installed.") - _HAS_PYANSYS_REPORT = False # path of this module MODULE_PATH = os.path.dirname(inspect.getfile(inspect.currentframe())) diff --git a/src/ansys/mapdl/core/pool.py b/src/ansys/mapdl/core/pool.py index 49b0d4903c..8e45a424a9 100755 --- a/src/ansys/mapdl/core/pool.py +++ b/src/ansys/mapdl/core/pool.py @@ -29,7 +29,7 @@ import warnings import weakref -from ansys.mapdl.core import LOG, launch_mapdl +from ansys.mapdl.core import _HAS_ATP, _HAS_TQDM, LOG, launch_mapdl from ansys.mapdl.core.errors import MapdlDidNotStart, MapdlRuntimeError, VersionError from ansys.mapdl.core.launcher import ( LOCALHOST, @@ -38,16 +38,11 @@ get_start_instance, port_in_use, ) -from ansys.mapdl.core.mapdl_grpc import _HAS_TQDM from ansys.mapdl.core.misc import create_temp_dir, threaded, threaded_daemon -try: +if _HAS_ATP: from ansys.tools.path import get_ansys_path, version_from_path - _HAS_ATP = True -except ModuleNotFoundError: - _HAS_ATP = False - if _HAS_TQDM: from tqdm import tqdm diff --git a/tests/conftest.py b/tests/conftest.py index d531594fdd..b0434fe308 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -33,6 +33,7 @@ import psutil import pytest +from ansys.mapdl.core.helpers import is_installed as has_dependency from ansys.mapdl.core.launcher import is_ansys_process from common import ( Element, @@ -135,23 +136,6 @@ ] -def import_module(requirement): - from importlib import import_module - - if os.name == "nt": - requirement = requirement.replace("-", ".") - return import_module(requirement) - - -def has_dependency(requirement): - try: - requirement = requirement.replace("-", ".") - import_module(requirement) - return True - except ModuleNotFoundError: - return False - - def requires(requirement: str): """Check requirements""" requirement = requirement.lower() @@ -239,7 +223,7 @@ def requires_dependency(dependency: str): from ansys.mapdl.core.examples import vmfiles from ansys.mapdl.core.launcher import get_start_instance, launch_mapdl -if has_dependency("ansys-tools-visualization-interface"): +if has_dependency("ansys-tools-visualization_interface"): import ansys.tools.visualization_interface as viz_interface viz_interface.TESTING_MODE = True From 134b325cfd57928829f0c0e591267ab0b5cf918b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 09:44:29 +0200 Subject: [PATCH 20/33] build: bump psutil from 6.0.0 to 6.1.0 in the minimal group (#3492) * build: bump psutil from 6.0.0 to 6.1.0 in the minimal group Bumps the minimal group with 1 update: [psutil](https://github.com/giampaolo/psutil). 
Updates `psutil` from 6.0.0 to 6.1.0 - [Changelog](https://github.com/giampaolo/psutil/blob/master/HISTORY.rst) - [Commits](https://github.com/giampaolo/psutil/compare/release-6.0.0...release-6.1.0) --- updated-dependencies: - dependency-name: psutil dependency-type: direct:production update-type: version-update:semver-minor dependency-group: minimal ... Signed-off-by: dependabot[bot] * chore: adding changelog file 3492.maintenance.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3492.maintenance.md | 1 + minimum_requirements.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3492.maintenance.md diff --git a/doc/changelog.d/3492.maintenance.md b/doc/changelog.d/3492.maintenance.md new file mode 100644 index 0000000000..31c89238ae --- /dev/null +++ b/doc/changelog.d/3492.maintenance.md @@ -0,0 +1 @@ +build: bump psutil from 6.0.0 to 6.1.0 in the minimal group \ No newline at end of file diff --git a/minimum_requirements.txt b/minimum_requirements.txt index 0f95558e27..7df708dc81 100644 --- a/minimum_requirements.txt +++ b/minimum_requirements.txt @@ -2,5 +2,5 @@ ansys-api-mapdl==0.5.2 importlib-metadata==8.5.0 numpy==2.1.2 platformdirs==4.3.6 -psutil==6.0.0 +psutil==6.1.0 pyansys-tools-versioning==0.6.0 From 662b8e1782e468079d5791a401d0d96ca8486a95 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 09:45:08 +0200 Subject: [PATCH 21/33] build: bump the documentation group with 2 updates (#3495) * build: bump the documentation group with 2 updates Bumps the documentation group with 2 updates: [imageio](https://github.com/imageio/imageio) and [sphinx](https://github.com/sphinx-doc/sphinx). Updates `imageio` from 2.35.1 to 2.36.0 - [Release notes](https://github.com/imageio/imageio/releases) - [Changelog](https://github.com/imageio/imageio/blob/master/CHANGELOG.md) - [Commits](https://github.com/imageio/imageio/compare/v2.35.1...v2.36.0) Updates `sphinx` from 8.1.0 to 8.1.3 - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/v8.1.3/CHANGES.rst) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v8.1.0...v8.1.3) --- updated-dependencies: - dependency-name: imageio dependency-type: direct:production update-type: version-update:semver-minor dependency-group: documentation - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-patch dependency-group: documentation ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3495.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3495.dependencies.md | 1 + pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 doc/changelog.d/3495.dependencies.md diff --git a/doc/changelog.d/3495.dependencies.md b/doc/changelog.d/3495.dependencies.md new file mode 100644 index 0000000000..ed1f41ce94 --- /dev/null +++ b/doc/changelog.d/3495.dependencies.md @@ -0,0 +1 @@ +build: bump the documentation group with 2 updates \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 034522ee2f..bb13735c82 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ doc = [ "ansys-sphinx-theme==1.1.2", "grpcio==1.66.2", "imageio-ffmpeg==0.5.1", - "imageio==2.35.1", + "imageio==2.36.0", "jupyter==1.1.1", "jupyter_sphinx==0.5.3", "jupyterlab>=3.2.8", @@ -100,7 +100,7 @@ doc = [ "sphinx-gallery==0.18.0", "sphinx-jinja==2.0.2", "sphinx-notfound-page==1.0.4", - "sphinx==8.1.0", + "sphinx==8.1.3", "sphinxcontrib-websupport==2.0.0", "sphinxemoji==0.3.1", "vtk==9.3.1", From 7eb16ddaf4ca1d616a2d202843b922d9b1aaadee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 09:45:59 +0200 Subject: [PATCH 22/33] build: bump ansys-sphinx-theme from 1.1.2 to 1.1.5 in the core group (#3494) * build: bump ansys-sphinx-theme from 1.1.2 to 1.1.5 in the core group Bumps the core group with 1 update: [ansys-sphinx-theme](https://github.com/ansys/ansys-sphinx-theme). Updates `ansys-sphinx-theme` from 1.1.2 to 1.1.5 - [Release notes](https://github.com/ansys/ansys-sphinx-theme/releases) - [Commits](https://github.com/ansys/ansys-sphinx-theme/compare/v1.1.2...v1.1.5) --- updated-dependencies: - dependency-name: ansys-sphinx-theme dependency-type: direct:production update-type: version-update:semver-patch dependency-group: core ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3494.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3494.dependencies.md | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3494.dependencies.md diff --git a/doc/changelog.d/3494.dependencies.md b/doc/changelog.d/3494.dependencies.md new file mode 100644 index 0000000000..9d45698ce3 --- /dev/null +++ b/doc/changelog.d/3494.dependencies.md @@ -0,0 +1 @@ +build: bump ansys-sphinx-theme from 1.1.2 to 1.1.5 in the core group \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index bb13735c82..90f9ffac00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ doc = [ "ansys-dpf-core==0.10.1", "ansys-mapdl-reader==0.54.1", "ansys-tools-visualization-interface==0.4.5", - "ansys-sphinx-theme==1.1.2", + "ansys-sphinx-theme==1.1.5", "grpcio==1.66.2", "imageio-ffmpeg==0.5.1", "imageio==2.36.0", From 8feaed375a3173716becd790796db604b1369d0d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 08:18:44 +0000 Subject: [PATCH 23/33] build: bump grpcio from 1.66.2 to 1.67.0 in the grpc-deps group (#3493) * build: bump grpcio from 1.66.2 to 1.67.0 in the grpc-deps group Bumps the grpc-deps group with 1 update: [grpcio](https://github.com/grpc/grpc). Updates `grpcio` from 1.66.2 to 1.67.0 - [Release notes](https://github.com/grpc/grpc/releases) - [Changelog](https://github.com/grpc/grpc/blob/master/doc/grpc_release_schedule.md) - [Commits](https://github.com/grpc/grpc/compare/v1.66.2...v1.67.0) --- updated-dependencies: - dependency-name: grpcio dependency-type: direct:production update-type: version-update:semver-minor dependency-group: grpc-deps ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3493.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: German <28149841+germa89@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3493.dependencies.md | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3493.dependencies.md diff --git a/doc/changelog.d/3493.dependencies.md b/doc/changelog.d/3493.dependencies.md new file mode 100644 index 0000000000..d1f604e862 --- /dev/null +++ b/doc/changelog.d/3493.dependencies.md @@ -0,0 +1 @@ +build: bump grpcio from 1.66.2 to 1.67.0 in the grpc-deps group \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 90f9ffac00..6a266af4c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 +78,7 @@ doc = [ "ansys-mapdl-reader==0.54.1", "ansys-tools-visualization-interface==0.4.5", "ansys-sphinx-theme==1.1.5", - "grpcio==1.66.2", + "grpcio==1.67.0", "imageio-ffmpeg==0.5.1", "imageio==2.36.0", "jupyter==1.1.1", From ffdcfa900db6a775f0cf5239f67d49f1d8bc3213 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 09:30:37 +0000 Subject: [PATCH 24/33] build: bump ansys-sphinx-theme from 1.1.2 to 1.1.6 in the core group across 1 directory (#3496) * build: bump ansys-sphinx-theme in the core group across 1 directory Bumps the core group with 1 update in the / directory: [ansys-sphinx-theme](https://github.com/ansys/ansys-sphinx-theme). Updates `ansys-sphinx-theme` from 1.1.2 to 1.1.6 - [Release notes](https://github.com/ansys/ansys-sphinx-theme/releases) - [Commits](https://github.com/ansys/ansys-sphinx-theme/compare/v1.1.2...v1.1.6) --- updated-dependencies: - dependency-name: ansys-sphinx-theme dependency-type: direct:production update-type: version-update:semver-patch dependency-group: core ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3496.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3496.dependencies.md | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3496.dependencies.md diff --git a/doc/changelog.d/3496.dependencies.md b/doc/changelog.d/3496.dependencies.md new file mode 100644 index 0000000000..f7c6c74b3d --- /dev/null +++ b/doc/changelog.d/3496.dependencies.md @@ -0,0 +1 @@ +build: bump ansys-sphinx-theme from 1.1.2 to 1.1.6 in the core group across 1 directory \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 6a266af4c6..480246e92e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ doc = [ "ansys-dpf-core==0.10.1", "ansys-mapdl-reader==0.54.1", "ansys-tools-visualization-interface==0.4.5", - "ansys-sphinx-theme==1.1.5", + "ansys-sphinx-theme==1.1.6", "grpcio==1.67.0", "imageio-ffmpeg==0.5.1", "imageio==2.36.0", From e8248a3f8dfa005967372febf8252147f25dafd8 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Mon, 21 Oct 2024 11:49:41 +0200 Subject: [PATCH 25/33] feat: passing tight integration env vars to mapdl (#3500) * feat: adding env vars needed for multinode * feat: adding env vars needed for multinode * feat: renaming hpc detection argument * docs: adding documentation * chore: adding changelog file 3466.documentation.md * feat: adding env vars needed for multinode * feat: renaming hpc detection argument * docs: adding documentation * chore: adding changelog file 3466.documentation.md * fix: vale issues * chore: To fix sphinx build Squashed commit of the following: commit c1d1a3ea278e6461bcc91e1c965f6e6a46d00bc3 Author: German <28149841+germa89@users.noreply.github.com> Date: Mon Oct 7 15:33:19 2024 +0200 ci: retrigger CICD commit b7b5c30a422413d203a31f5a29b7e57f93a0ab08 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Oct 7 13:31:55 2024 +0000 ci: auto fixes from pre-commit.com hooks. 
for more information, see https://pre-commit.ci commit 32a1c0203fc5101f429aafafba26a28cc06bf24c Author: Revathy Venugopal <104772255+Revathyvenugopal162@users.noreply.github.com> Date: Mon Oct 7 15:31:24 2024 +0200 fix: add suggestions Co-authored-by: German <28149841+germa89@users.noreply.github.com> commit 575a219ef8b135b234f2ec5f24a9585298845eca Merge: f2afe139f be1be2e2c Author: Revathyvenugopal162 Date: Mon Oct 7 15:09:01 2024 +0200 Merge branch 'fix/add-build-cheatsheet-as-env-varaible' of https://github.com/ansys/pymapdl into fix/add-build-cheatsheet-as-env-varaible commit f2afe139f693f4f1979506662c514692280487a9 Author: Revathyvenugopal162 Date: Mon Oct 7 15:08:58 2024 +0200 fix: precommit commit be1be2e2ca4f8736db0b180ab3d8cc6bff696412 Author: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Date: Mon Oct 7 13:07:35 2024 +0000 chore: adding changelog file 3468.fixed.md commit f052a4dba77cb586be59232d2627d7814077f094 Author: Revathyvenugopal162 Date: Mon Oct 7 15:05:56 2024 +0200 fix: add build cheatsheet as env variable within doc-build * docs: expanding a bit troubleshooting advices and small format fix * docs: fix vale * fix: nproc tests * feat: adding env vars needed for multinode * feat: renaming hpc detection argument * docs: adding documentation * chore: adding changelog file 3466.documentation.md * fix: vale issues * docs: fix vale * docs: expanding a bit troubleshooting advices and small format fix * fix: nproc tests * revert: "chore: To fix sphinx build" This reverts commit e45d2e5d4fb97359605f445f462fa4b9cf76515a. * docs: clarifying where everything is running. * docs: expanding bash example * tests: fix * docs: adding `PYMAPDL_NPROC` to env var section * docs: fix vale issue * docs: fix vale issue * fix: replacing env var name * fix: unit tests * chore: adding changelog file 3500.documentation.md [dependabot-skip] --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3466.documentation.md | 1 + doc/changelog.d/3500.documentation.md | 1 + .../extended_examples/hpc/hpc_ml_ga.rst | 2 +- doc/source/user_guide/hpc/pymapdl.rst | 184 ++++++++++++++---- doc/source/user_guide/hpc/settings.rst | 49 +++-- doc/source/user_guide/hpc/troubleshooting.rst | 137 ++++++++++--- doc/source/user_guide/mapdl.rst | 179 +++++++++-------- doc/source/user_guide/troubleshoot.rst | 1 + src/ansys/mapdl/core/launcher.py | 32 ++- tests/test_launcher.py | 47 +++-- 10 files changed, 441 insertions(+), 192 deletions(-) create mode 100644 doc/changelog.d/3466.documentation.md create mode 100644 doc/changelog.d/3500.documentation.md diff --git a/doc/changelog.d/3466.documentation.md b/doc/changelog.d/3466.documentation.md new file mode 100644 index 0000000000..902767602d --- /dev/null +++ b/doc/changelog.d/3466.documentation.md @@ -0,0 +1 @@ +feat: passing tight integration env vars to mapdl \ No newline at end of file diff --git a/doc/changelog.d/3500.documentation.md b/doc/changelog.d/3500.documentation.md new file mode 100644 index 0000000000..902767602d --- /dev/null +++ b/doc/changelog.d/3500.documentation.md @@ -0,0 +1 @@ +feat: passing tight integration env vars to mapdl \ No newline at end of file diff --git a/doc/source/examples/extended_examples/hpc/hpc_ml_ga.rst b/doc/source/examples/extended_examples/hpc/hpc_ml_ga.rst index 30570b5c6b..fb87bb7e6d 100644 --- a/doc/source/examples/extended_examples/hpc/hpc_ml_ga.rst +++ b/doc/source/examples/extended_examples/hpc/hpc_ml_ga.rst @@ -251,7 +251,7 @@ this script. 
If you have problems when creating the virtual environment or accessing it from the compute nodes, - see :ref:`ref_hpc_pymapdl_job`. + see :ref:`ref_hpc_troubleshooting`. 3. Install the requirements for this example from the :download:`requirements.txt ` file. diff --git a/doc/source/user_guide/hpc/pymapdl.rst b/doc/source/user_guide/hpc/pymapdl.rst index e0fddefa78..6f66ec52ca 100644 --- a/doc/source/user_guide/hpc/pymapdl.rst +++ b/doc/source/user_guide/hpc/pymapdl.rst @@ -1,84 +1,184 @@ -.. _ref_hpc_pymapdl: +.. _ref_hpc_pymapdl_job: -============================= -PyMAPDL on SLURM HPC clusters -============================= +======================= +PyMAPDL on HPC Clusters +======================= -.. _ref_hpc_pymapdl_job: -Submit a PyMAPDL job -==================== +Introduction +============ -To submit a PyMAPDL job, you must create two files: +PyMAPDL communicates with MAPDL using the gRPC protocol. +This protocol offers many advantages and features, for more information +see :ref:`ref_project_page`. +One of these features is that it is not required to have both, +PyMAPDL and MAPDL processes, running on the same machine. +This possibility open the door to many configurations, depending +on whether you run them both or not on the HPC compute nodes. +Additionally, you might to be able interact with them (``interactive`` mode) +or not (``batch`` mode). -- Python script with the PyMAPDL code -- Bash script that activates the virtual environment and calls the Python script +Currently, the supported configurations are: + +* :ref:`ref_pymapdl_batch_in_cluster_hpc` + + +Since v0.68.5, PyMAPDL can take advantage of the tight integration +between the scheduler and MAPDL to read the job configuration and +launch an MAPDL instance that can use all the resources allocated +to that job. +For instance, if a SLURM job has allocated 8 nodes with 4 cores each, +then PyMAPDL launches an MAPDL instance which uses 32 cores +spawning across those 8 nodes. +This behaviour can turn off if passing the environment variable +:envvar:`PYMAPDL_ON_SLURM` or passing the argument `detect_HPC=False` +to :func:`launch_mapdl() `. + + +.. _ref_pymapdl_batch_in_cluster_hpc: + +Submit a PyMAPDL batch job to the cluster from the entrypoint node +================================================================== + +Many HPC clusters allow their users to login in a machine using +``ssh``, ``vnc``, ``rdp``, or similar technologies and submit a job +to the cluster from there. +This entrypoint machine, sometimes known as *head node* or *entrypoint node*, +might be a virtual machine (VDI/VM). + +In such cases, once the Python virtual environment with PyMAPDL is already +set and is accessible to all the compute nodes, launching a +PyMAPDL job from the entrypoint is very easy to do using ``sbatch`` command. +Using ``sbatch`` command, the PyMAPDL runs and launches an MAPDL instance in +the compute nodes. +No changes are needed on a PyMAPDL script to run it on an SLURM cluster. + +First the virtual environment must be activated in the current terminal. + +.. code-block:: console -**Python script:** ``pymapdl_script.py`` + user@entrypoint-machine:~$ export VENV_PATH=/my/path/to/the/venv + user@entrypoint-machine:~$ source $VENV_PATH/bin/activate + +Once the virtual environment has been activated, you can launch any Python +script if they do have the proper Python shebang (``#!/usr/bin/env python3``). + +For instance, to launch the following Python script ``main.py``: .. 
code-block:: python + :caption: main.py + + #!/usr/bin/env python3 from ansys.mapdl.core import launch_mapdl - # Number of processors must be lower than the - # number of CPUs allocated for the job. - mapdl = launch_mapdl(nproc=10) + mapdl = launch_mapdl(run_location="/home/ubuntu/tmp/tmp/mapdl", loglevel="debug") - mapdl.prep7() - n_proc = mapdl.get_value("ACTIVE", 0, "NUMCPU") - print(f"Number of CPUs: {n_proc}") + print(mapdl.prep7()) + print(f'Number of CPU: {mapdl.get_value("ACTIVE", 0, "NUMCPU")}') mapdl.exit() +You can just run in your console: -**Bash script:** ``job.sh`` - -.. code-block:: bash +.. code-block:: console - source /home/user/.venv/bin/activate - python pymapdl_script.py + (venv) user@entrypoint-machine:~$ sbatch main.py -To start the simulation, you use this code: +Alternatively, you can remove the shebang from the python file and use a +Python executable call: .. code-block:: console - user@machine:~$ srun job.sh + (venv) user@entrypoint-machine:~$ sbatch python main.py + +Additionally, you can change the amount of cores used in your +job, by setting the :envvar:`PYMAPDL_NPROC` to the desired value. + +.. code-block:: console + (venv) user@entrypoint-machine:~$ PYMAPDL_NPROC=4 sbatch main.py -The bash script allows you to customize the environment before running the Python script. -This bash script performs such tasks as creating environment variables, moving to -different directories, and printing to ensure your configuration is correct. However, -this bash script is not mandatory. -You can avoid having the ``job.sh`` bash script if the virtual environment is activated -and you pass all the environment variables to the job: +You can also add ``sbatch`` options to the command: .. code-block:: console - user@machine:~$ source /home/user/.venv/bin/activate - (.venv) user@machine:~$ srun python pymapdl_script.py --export=ALL + (venv) user@entrypoint-machine:~$ PYMAPDL_NPROC=4 sbatch main.py -The ``--export=ALL`` argument might not be needed, depending on the cluster configuration. -Furthermore, you can omit the Python call in the preceding command if you include the -Python shebang (``#!/usr/bin/python3``) in the first line of the ``pymapdl_script.py`` script. +For instance, to launch a PyMAPDL job which start a four cores MAPDL instance +on a 10 CPU SLURM job, you can use: .. code-block:: console - user@machine:~$ source /home/user/.venv/bin/activate - (.venv) user@machine:~$ srun pymapdl_script.py --export=ALL + (venv) user@entrypoint-machine:~$ PYMAPDL_NPROC=4 sbatch --partition=qsmall --nodes=10 --ntasks-per-node=1 main.py + + +Using a submission script +------------------------- + +In case you need to customize more your job, you can create a SLURM +submission script to submit a PyMAPDL job. +In this case, you must create two files: + +- Python script with the PyMAPDL code +- Bash script that activates the virtual environment and calls the + Python script. + +.. code-block:: python + :caption: main.py -If you prefer to run the job in the background, you can use the ``sbatch`` -command instead of the ``srun`` command. However, in this case, the Bash file is needed: + from ansys.mapdl.core import launch_mapdl + + # Number of processors must be lower than the + # number of CPU allocated for the job. + mapdl = launch_mapdl(nproc=10) + + mapdl.prep7() + n_proc = mapdl.get_value("ACTIVE", 0, "NUMCPU") + print(f"Number of CPU: {n_proc}") + + mapdl.exit() + + +.. 
code-block:: bash + :caption: job.sh + + #!/bin/bash + # Set SLURM options + #SBATCH --job-name=ansys_job # Job name + #SBATCH --partition=qsmall # Specify the queue/partition name + #SBATCH --nodes=5 # Number of nodes + #SBATCH --ntasks-per-node=2 # Number of tasks (cores) per node + #SBATCH --time=04:00:00 # Set a time limit for the job (optional but recommended) + + # Set env vars + export MY_ENV_VAR=VALUE + + # Activating Python virtual environment + source /home/user/.venv/bin/activate + # Calling Python script + python main.py + +To start the simulation, you use this code: .. code-block:: console user@machine:~$ sbatch job.sh - Submitted batch job 1 -Here is the expected output of the job: +In this case, the Python virtual environment does not need to be activated +before submission since it is activated later in the script. + +The expected output of the job is .. code-block:: text - Number of CPUs: 10.0 + Number of CPU: 10.0 + +The bash script allows you to customize the environment before running the +Python script. +This bash script performs tasks such as creating environment variables, +moving files to different directories, and printing to ensure your +configuration is correct. diff --git a/doc/source/user_guide/hpc/settings.rst b/doc/source/user_guide/hpc/settings.rst index 7f6af61c63..f4366ab6f0 100644 --- a/doc/source/user_guide/hpc/settings.rst +++ b/doc/source/user_guide/hpc/settings.rst @@ -7,14 +7,16 @@ Setting PyMAPDL Requirements ============ -Using PyMAPDL in an HPC environment managed by SLURM scheduler has certain requirements: +Using PyMAPDL in an HPC environment managed by SLURM scheduler has certain +requirements: * **An Ansys installation must be accessible from all the compute nodes**. This normally implies that the ``ANSYS`` installation directory is in a shared drive or directory. Your HPC cluster administrator should provide you with the path to the ``ANSYS`` directory. -* **A compatible Python installation must be accessible from all the compute nodes**. +* **A compatible Python installation must be accessible from all the compute + nodes**. For compatible Python versions, see :ref:`ref_pymapdl_installation`. Additionally, you must perform a few key steps to ensure efficient job @@ -23,8 +25,8 @@ execution and resource utilization. Subsequent topics describe these steps. Check the Python installation ============================= -The PyMAPDL Python package (``ansys-mapdl-core``) must be installed in a virtual -environment that is accessible from the compute nodes. +The PyMAPDL Python package (``ansys-mapdl-core``) must be installed in +a virtual environment that is accessible from the compute nodes. To see where your Python distribution is installed, use this code: @@ -40,9 +42,10 @@ To print the version of Python you have available, use this code: user@machine:~$ python3 --version Python 3.9.16 -You should be aware that your machine might have installed other Python versions. -To find out if those installations are already in the ``PATH`` environment variable, -you can press the **Tab** key to use autocomplete: +You should be aware that your machine might have other Python versions +installed. +To find out if those installations are already in the ``PATH`` environment +variable, you can press the **Tab** key to use autocomplete: .. code-block:: console @@ -55,11 +58,20 @@ you can press the **Tab** key to use autocomplete: You should use a Python version that is compatible with PyMAPDL. For more information, see :ref:`ref_pymapdl_installation`. 
-The ``which`` command returns the path where the Python executable is installed. -You can use that executable to create your own Python virtual environment in a directory -that is accessible from all the compute nodes. -For most HPC clusters, the ``/home/$user`` directory is generally available to all nodes. -You can then create the virtual environment in the ``/home/user/.venv`` directory: +.. warning:: + + Contact your cluster administrator if you cannot find a Python version + compatible with PyMAPDL. + + +The ``which`` command returns the path where the Python executable is +installed. +You can use that executable to create your own Python virtual environment +in a directory that is accessible from all the compute nodes. +For most HPC clusters, the ``/home/$user`` directory is generally available +to all nodes. +You can then create the virtual environment in the ``/home/user/.venv`` +directory: .. code-block:: console @@ -67,11 +79,13 @@ You can then create the virtual environment in the ``/home/user/.venv`` director After activating the virtual environment, you can install PyMAPDL. +.. _ref_install_pymapdl_on_hpc: Install PyMAPDL =============== -To install PyMAPDL on the activated virtual environment, run the following commands: +To install PyMAPDL on the activated virtual environment, run the following +commands: .. code-block:: console @@ -107,8 +121,8 @@ then you can run that script using: user@machine:~$ srun test.sh -This command might take a minute or two to complete, depending on the amount of free -resources available in the cluster. +This command might take a minute or two to complete, depending on the amount of +free resources available in the cluster. On the console, you should see this output: .. code-block:: text @@ -116,5 +130,6 @@ On the console, you should see this output: Testing Python! PyMAPDL version 0.68.1 was successfully imported. -If you see an error in the output, see :ref:`ref_hpc_troubleshooting`, especially -:ref:`ref_python_venv_not_accesible`. +If you see an error in the output, see :ref:`ref_hpc_troubleshooting`, +especially :ref:`ref_python_venv_not_accesible`. + diff --git a/doc/source/user_guide/hpc/troubleshooting.rst b/doc/source/user_guide/hpc/troubleshooting.rst index 3a41a60537..528c00fea9 100644 --- a/doc/source/user_guide/hpc/troubleshooting.rst +++ b/doc/source/user_guide/hpc/troubleshooting.rst @@ -7,10 +7,41 @@ Troubleshooting Debugging jobs -------------- -- Use ``--output`` and ``--error`` directives in batch scripts to capture - standard output and error messages. +- Use ``--output`` and ``--error`` directives in batch scripts to captures + standard output and error messages to specific files. + + .. code-block:: bash + + #!/bin/bash + #SBATCH --job-name=ansys_job # Job name + #SBATCH --partition=qsmall # Specify the queue/partition name + #SBATCH --output=ansys_job.out # Standard output file + #SBATCH --error=ansys_job.err # Standard error file + + source /home/user/pymapdl/.venv/bin/activate + python /home/user/pymapdl.py - Check SLURM logs for error messages and debugging information. +- It is also good idea to print the environment variables in your bash script, using + ``printenv``. Additionally, you can filter them using ``grep``. + + .. 
code-block:: bash + + #!/bin/bash + #SBATCH --job-name=ansys_job # Job name + #SBATCH --partition=qsmall # Specify the queue/partition name + #SBATCH --output=ansys_job.out # Standard output file + #SBATCH --error=ansys_job.err # Standard error file + + printenv | grep "PYMAPDL" # Print env vars which contains 'PYMAPDL' + printenv | grep "SLURM" # Print env vars which contains 'SLURM' + source /home/user/pymapdl/.venv/bin/activate + python /home/user/pymapdl.py + +- Use PyMAPDL logging to printout valuable information. To activate this, see + :ref:`ref_debug_pymapdl`. + +- In case you need more help, visit :ref:`ref_troubleshooting`. .. _ref_python_venv_not_accesible: @@ -19,44 +50,91 @@ Python virtual environment is not accessible -------------------------------------------- If there is an error while testing the Python installation, it might mean that the Python environment is not accessible to the compute nodes. -For example, in the following output, PyMAPDL could not be found, meaning that the script -is not using the virtual environment (``/home/user/.venv``): +For example, given the following *bash* script `test.sh`: + +.. code-block:: bash + + source /home/user/.venv/bin/activate + python -c "from ansys.mapdl import core as pymapdl; pymapdl.report()" + +The following output is shown after running in the terminal: .. code-block:: console user@machine:~$ srun test.sh + Testing Python! Traceback (most recent call last): File "", line 1, in ImportError: No module named ansys.mapdl -This could be for a number of reasons. One of them is that the system Python distribution -used to create the virtual environment is not accessible from the compute nodes -due to one of these reasons: +As the output shows, PyMAPDL could not be found, meaning that either: + +* The virtual environment does not have PyMAPDL installed. + See :ref:`ref_install_pymapdl_on_hpc`. + +* Or the script did not activate properly the virtual environment + (``/home/user/.venv``). -- The virtual environment has been created in a - directory that is not accessible from the nodes. -- The virtual environment has been created from a Python - executable that is not available to the compute nodes. - Hence, the virtual environment is not activated. For - example, you might be creating the virtual environment - using Python 3.10, but only Python 3.8 is available - from the compute nodes. +For the second reason, there could be a number of reasons. +One of them is that the system Python distribution used to create +the virtual environment is not accessible from the compute nodes +due to one of these reasons: -You can test which Python executable the cluster is using by starting an interactive session in -a compute node with this code: +- The virtual environment has been created in a directory that is + not accessible from the nodes. In this case, your terminal might + also show that the ``activate`` file could not be found. + + .. code-block:: console + + user@machine:~$ srun test.sh + Testing Python! + bash: .venv/bin/activate: No such file or directory + + Depending on your terminal configuration, the preceding error might be + sufficient to exit the terminal process, or not. + If not, the execution continues, and the subsequent ``python`` call is + executed using the default python executable. + It is very likely that the default ``python`` executable does not have + PyMAPDL installed, hence the ``ImportError`` error showed preceding might + appear too. 
+ +- The virtual environment has been created from a Python executable that is + not available to the compute nodes. Hence, the virtual environment is not + activated. + For example, you might be creating the virtual environment Using + Python 3.10, but only Python 3.8 is available from the compute nodes. + You can test which Python executable the cluster is using by starting an + interactive session in a compute node with this code to list all commands + which starts with ``python``: .. code-block:: console user@machine:~$ srun --pty /bin/bash - user@compute_node_01:~$ compgen -c | grep python # List all commands starting with python + user@compute_node_01:~$ compgen -c | grep python .. the approach to solve this comes from: https://stackoverflow.com/questions/64188693/problem-with-python-environment-and-slurm-srun-sbatch +It should be noticed the preceding approach assumes that all the nodes have similar +configuration, hence all of them should have the same Python installations +available. + +It is also convenient to be aware that environment variable modules can be +used to activate Python installations. +For more information, see :ref:`ref_envvar_modules_on_hpc`. + + +.. _ref_envvar_modules_on_hpc: + +Using modules to load Python +---------------------------- + Many HPC infrastructures use environment managers to load and unload -software packages using modules and environment variables. -Hence, you might want to make sure that the correct module is loaded in your script. +software packages using modules and environment variables. +Hence, you might want to make sure that the correct module is loaded in your +script. + For information on two of the most common environment managers, see the `Modules documentation `_ and `Lmod documentation `_. Check your cluster documentation to know which environment @@ -76,12 +154,14 @@ Using the Ansys-provided Python installation **For development purposes only** -In certain HPC environments the possibility of installing a different Python version -is limited for security reasons. In such cases, the Python distribution available in -the Ansys installation can be used. -This Python distribution is a customized Python (CPython) -version for Ansys products use only. Its use is **discouraged** -except for very advanced users and special use cases. +In certain HPC environments the possibility of installing a different Python +version is limited for security reasons. +In such cases, the Python distribution available in the Ansys installation +can be used. +This Python distribution is a customized Python (CPython) version for Ansys +products use only. +Its use is **discouraged** except for very advanced users and special use +cases. This Python distribution is in the following directory, where ``%MAPDL_VERSION%`` is the three-digit Ansys version: @@ -98,7 +178,8 @@ For example, here is the directory for Ansys 2024 R2: In Ansys 2024 R1 and later, the unified installer includes CPython 3.10. -Earlier versions include CPython 3.7 (``/commonfiles/CPython/3_7/linx64/Release/python``). +Earlier versions include CPython 3.7 +(``/commonfiles/CPython/3_7/linx64/Release/python``). Because the Ansys installation must be available to all the compute nodes to run simulations using them, this @@ -116,6 +197,8 @@ the compute nodes: user@machine:~$ export PY_PATH=/ansys_inc/v241/commonfiles/CPython/3_10/linx64/Release/Python + This path needs to be adapted to where Ansys is installed and also which version is used. + #. 
For only Ansys 2024 R1 and earlier, patch the ``PATH`` and ``LD_LIBRARY_PATH`` environment variables: diff --git a/doc/source/user_guide/mapdl.rst b/doc/source/user_guide/mapdl.rst index bfc59931b5..d60d8e610d 100644 --- a/doc/source/user_guide/mapdl.rst +++ b/doc/source/user_guide/mapdl.rst @@ -1097,83 +1097,106 @@ Environment variables ===================== There are several PyMAPDL-specific environment variables that can be -used to control the behavior or launching of PyMAPDL and MAPDL. +used to control the default behavior of PyMAPDL or launching MAPDL. + +It should be mentioned that these environment variables do not have +priority over the arguments given in the corresponding functions. +For instance: + +.. code-block:: console + + user@machine:~$ export PYMAPDL_PORT=50052 + user@machine:~$ python -c "from ansys.mapdl.core import launch_mapdl; mapdl=launch_mapdl(port=60053)" + +The preceding command launches an MAPDL instance on the port 60053, +because the argument ``port`` has priority over the environment +variable :envvar:`PYMAPDL_PORT`. + These are described in the following table: -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`PYMAPDL_START_INSTANCE` | Override the behavior of the | -| | :func:`ansys.mapdl.core.launcher.launch_mapdl` function | -| | to only attempt to connect to existing | -| | instances of PyMAPDL. Generally used | -| | in combination with ``PYMAPDL_PORT``. | -| | | -| | **Example:** | -| | | -| | .. code:: console | -| | | -| | export PYMAPDL_START_INSTANCE=True | -| | | -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`PYMAPDL_PORT` | Default port for PyMAPDL to connect to. | -| | | -| | **Example:** | -| | | -| | .. code:: console | -| | | -| | export PYMAPDL_PORT=50052 | -| | | -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`PYMAPDL_IP` | Default IP for PyMAPDL to connect to. | -| | | -| | **Example:** | -| | | -| | .. code:: console | -| | | -| | export PYMAPDL_IP=123.45.67.89 | -| | | -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`ANSYSLMD_LICENSE_FILE` | License file or IP address with port in the format | -| | ``PORT@IP``. Do not confuse with the ``IP`` and | -| | ``PORT`` where the MAPDL instance is running, which | -| | are specified using :envvar:`PYMAPDL_IP` and | -| | :envvar:`PYMAPDL_PORT`. | -| | This is helpful for supplying licensing for | -| | Docker. | -| | | -| | **Example:** | -| | | -| | .. code:: console | -| | | -| | export ANSYSLMD_LICENSE_FILE=1055@123.45.67.89 | -| | | -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`PYMAPDL_MAPDL_EXEC` | Executable path from where to launch MAPDL | -| | instances. | -| | | -| | **Example:** | -| | | -| | .. code:: console | -| | | -| | export PYMAPDL_MAPDL_EXEC=/ansys_inc/v241/ansys/bin/mapdl | -| | | -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`PYMAPDL_MAPDL_VERSION` | Default MAPDL version to launch in case there | -| | are several versions availables. | -| | | -| | **Example:** | -| | | -| | .. 
code:: console | -| | | -| | export PYMAPDL_MAPDL_VERSION=22.2 | -| | | -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`PYMAPDL_ON_SLURM` | With this environment variable set to ``FALSE``, you can avoid | -| | PyMAPDL from detecting that it is running on a SLURM HPC cluster. | -+---------------------------------------+---------------------------------------------------------------------+ -| :envvar:`PYMAPDL_MAX_MESSAGE_LENGTH` | Maximum gRPC message length. If your | -| | connection terminates when running | -| | PRNSOL or NLIST, raise this. In bytes, | -| | defaults to 256 MB. | -| | | -| | Only for developing purposes. | -+---------------------------------------+---------------------------------------------------------------------+ ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_START_INSTANCE` | Override the behavior of the | +| | :func:`ansys.mapdl.core.launcher.launch_mapdl` function | +| | to only attempt to connect to existing | +| | instances of PyMAPDL. Generally used | +| | in combination with ``PYMAPDL_PORT``. | +| | | +| | **Example:** | +| | | +| | .. code-block:: console | +| | | +| | user@machine:~$ export PYMAPDL_START_INSTANCE=True | +| | | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_PORT` | Default port for PyMAPDL to connect to. | +| | | +| | **Example:** | +| | | +| | .. code-block:: console | +| | | +| | user@machine:~$ export PYMAPDL_PORT=50052 | +| | | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_IP` | Default IP for PyMAPDL to connect to. | +| | | +| | **Example:** | +| | | +| | .. code-block:: console | +| | | +| | user@machine:~$ export PYMAPDL_IP=123.45.67.89 | +| | | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_NPROC` | Default number of cores for MAPDL to use. | +| | | +| | **Example:** | +| | | +| | .. code-block:: console | +| | | +| | user@machine:~$ export PYMAPDL_NPROC=10 | +| | | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`ANSYSLMD_LICENSE_FILE` | License file or IP address with port in the format | +| | ``PORT@IP``. Do not confuse with the ``IP`` and | +| | ``PORT`` where the MAPDL instance is running, which | +| | are specified using :envvar:`PYMAPDL_IP` and | +| | :envvar:`PYMAPDL_PORT`. | +| | This is helpful for supplying licensing for | +| | Docker. | +| | | +| | **Example:** | +| | | +| | .. code-block:: console | +| | | +| | user@machine:~$ export ANSYSLMD_LICENSE_FILE=1055@123.45.89 | +| | | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_MAPDL_EXEC` | Executable path from where to launch MAPDL | +| | instances. | +| | | +| | **Example:** | +| | | +| | .. 
code-block:: console | +| | | +| | user@machine:~$ export PYMAPDL_MAPDL_EXEC=/ansys_inc/v241/ansys/bin/mapdl | +| | | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_MAPDL_VERSION` | Default MAPDL version to launch in case there | +| | are several versions availables. | +| | | +| | **Example:** | +| | | +| | .. code-block:: console | +| | | +| | user@machine:~$ export PYMAPDL_MAPDL_VERSION=22.2 | +| | | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_ON_SLURM` | With this environment variable set to ``FALSE``, you can avoid | +| | PyMAPDL from detecting that it is running on a SLURM HPC cluster. | ++---------------------------------------+----------------------------------------------------------------------------------+ +| :envvar:`PYMAPDL_MAX_MESSAGE_LENGTH` | Maximum gRPC message length. If your | +| | connection terminates when running | +| | PRNSOL or NLIST, raise this. In bytes, | +| | defaults to 256 MB. | +| | | +| | Only for developing purposes. | ++---------------------------------------+----------------------------------------------------------------------------------+ diff --git a/doc/source/user_guide/troubleshoot.rst b/doc/source/user_guide/troubleshoot.rst index 54cf12d0c7..74a2b63f35 100644 --- a/doc/source/user_guide/troubleshoot.rst +++ b/doc/source/user_guide/troubleshoot.rst @@ -8,6 +8,7 @@ Troubleshooting PyMAPDL To help you resolve any problems that you might have when using PyMAPDL, some of the most common problems and frequently asked questions are posted here. +.. _ref_debug_pymapdl: Debug in PyMAPDL ---------------- diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 047404f64c..2e6b8bf12c 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -106,7 +106,7 @@ "add_env_vars", "replace_env_vars", "version", - "detect_slurm_config", + "detect_HPC", "set_no_abort", "force_intel" # Non documented args @@ -972,8 +972,8 @@ def launch_mapdl( add_env_vars: Optional[Dict[str, str]] = None, replace_env_vars: Optional[Dict[str, str]] = None, version: Optional[Union[int, str]] = None, - detect_slurm_config: bool = True, - **kwargs, + detect_HPC: bool = True, + **kwargs: Dict[str, Any], ) -> Union[MapdlGrpc, "MapdlConsole"]: """Start MAPDL locally. @@ -1001,12 +1001,15 @@ def launch_mapdl( MAPDL jobname. Defaults to ``'file'``. nproc : int, optional - Number of processors. Defaults to 2. + Number of processors. Defaults to 2. If running on an HPC cluster, + this value is adjusted to the number of CPUs allocated to the job, + unless ``detect_HPC`` is set to "false". ram : float, optional - Total size in megabytes of the workspace (memory) used for the initial allocation. - The default is ``None``, in which case 2 GB (2048 MB) is used. To force a fixed size - throughout the run, specify a negative number. + Total size in megabytes of the workspace (memory) used for the initial + allocation. The default is ``None``, in which case 2 GB (2048 MB) is + used. To force a fixed size throughout the run, specify a negative + number. mode : str, optional Mode to launch MAPDL. Must be one of the following: @@ -1139,6 +1142,13 @@ def launch_mapdl( export PYMAPDL_MAPDL_VERSION=22.2 + detect_HPC: bool, optional + Whether detect if PyMAPDL is running on an HPC cluster or not. Currently + only SLURM clusters are supported. 
By detaul, it is set to true. + This option can be bypassed if the environment variable + ``PYMAPDL_ON_SLURM`` is set to "true". For more information visit + :ref:`ref_hpc_slurm`. + kwargs : dict, optional These keyword arguments are interface specific or for development purposes. See Notes for more details. @@ -1403,6 +1413,10 @@ def launch_mapdl( cleanup_on_exit=args["cleanup_on_exit"], version=args["version"] ) + if args["ON_SLURM"]: + env_vars.setdefault("ANS_MULTIPLE_NODES", "1") + env_vars.setdefault("HYDRA_BOOTSTRAP", "slurm") + # Early exit for debugging. if args["_debug_no_launch"]: # Early exit, just for testing @@ -1742,7 +1756,7 @@ def get_value( # ntasks is for mpi SLURM_NTASKS = get_value("SLURM_NTASKS", kwargs) LOG.info(f"SLURM_NTASKS: {SLURM_NTASKS}") - # Sharing tasks acrros multiple nodes (DMP) + # Sharing tasks across multiple nodes (DMP) # the format of this envvar is a bit tricky. Avoiding it for the moment. # SLURM_TASKS_PER_NODE = int( # kwargs.pop( @@ -1891,7 +1905,7 @@ def is_on_slurm(args: Dict[str, Any]) -> bool: # Let's require the following env vars to exist to go into slurm mode. args["ON_SLURM"] = bool( - args["detect_slurm_config"] + args["detect_HPC"] and not is_flag_false # default is true and os.environ.get("SLURM_JOB_NAME") and os.environ.get("SLURM_JOB_ID") diff --git a/tests/test_launcher.py b/tests/test_launcher.py index e7dc8b4843..63ea33d1d3 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -495,13 +495,6 @@ def test_launching_on_busy_port(mapdl, monkeypatch): launch_mapdl(port=mapdl.port) -@requires("local") -def test_cpu_checks(): - machine_cores = psutil.cpu_count(logical=False) - with pytest.raises(NotEnoughResources): - launch_mapdl(nproc=machine_cores + 2) - - def test_fail_channel_port(): with pytest.raises(ValueError): launch_mapdl(channel="something", port="something") @@ -610,7 +603,6 @@ def test_fail_channel_ip(): ), pytest.param( { - "PYMAPDL_NPROC": 5, "SLURM_JOB_NAME": "myawesomejob", "SLURM_NTASKS": 2, "SLURM_CPUS_PER_TASK": 2, @@ -619,12 +611,11 @@ def test_fail_channel_ip(): "SLURM_MEM_PER_NODE": None, "SLURM_NODELIST": None, }, - {"nproc": 5, "jobname": "myawesomejob"}, - id="Testing PYMAPDL_NPROC and SLURM_JOB_NAME", + {"nproc": 4, "jobname": "myawesomejob"}, + id="Testing SLURM_JOB_NAME", ), pytest.param( { - "PYMAPDL_NPROC": 5, "SLURM_JOB_NAME": "myawesomejob", "SLURM_NTASKS": 2, "SLURM_CPUS_PER_TASK": 2, @@ -634,8 +625,8 @@ def test_fail_channel_ip(): "SLURM_NODELIST": None, "PYMAPDL_MAPDL_EXEC": "asdf/qwer/poiu", }, - {"nproc": 5, "jobname": "myawesomejob", "exec_file": "asdf/qwer/poiu"}, - id="Testing PYMAPDL_NPROC and SLURM_JOB_NAME", + {"nproc": 4, "jobname": "myawesomejob", "exec_file": "asdf/qwer/poiu"}, + id="Testing PYMAPDL_MAPDL_EXEC and SLURM_JOB_NAME", ), ), indirect=["set_env_var_context"], @@ -705,17 +696,17 @@ def test_slurm_ram(monkeypatch, ram, expected, context): @pytest.mark.parametrize("slurm_env_var", ["True", "false", ""]) @pytest.mark.parametrize("slurm_job_name", ["True", "false", ""]) @pytest.mark.parametrize("slurm_job_id", ["True", "false", ""]) -@pytest.mark.parametrize("detect_slurm_config", [True, False, None]) +@pytest.mark.parametrize("detect_HPC", [True, False, None]) def test_is_on_slurm( - monkeypatch, slurm_env_var, slurm_job_name, slurm_job_id, detect_slurm_config + monkeypatch, slurm_env_var, slurm_job_name, slurm_job_id, detect_HPC ): monkeypatch.setenv("PYMAPDL_ON_SLURM", slurm_env_var) monkeypatch.setenv("SLURM_JOB_NAME", slurm_job_name) 
monkeypatch.setenv("SLURM_JOB_ID", slurm_job_id) - flag = is_on_slurm(args={"detect_slurm_config": detect_slurm_config}) + flag = is_on_slurm(args={"detect_HPC": detect_HPC}) - if detect_slurm_config is not True: + if detect_HPC is not True: assert not flag else: @@ -731,7 +722,7 @@ def test_is_on_slurm( if ON_LOCAL: assert ( launch_mapdl( - detect_slurm_config=detect_slurm_config, + detect_HPC=detect_HPC, _debug_no_launch=True, )["ON_SLURM"] == flag @@ -895,6 +886,26 @@ def mycpucount(**kwargs): return 10 # faking 10 cores +@patch("psutil.cpu_count", mycpucount) +def test_nproc_envvar(monkeypatch): + monkeypatch.setenv("PYMAPDL_NPROC", 10) + args = launch_mapdl(_debug_no_launch=True) + assert args["nproc"] == 10 + + +@pytest.mark.parametrize("nproc", [None, 5, 9, 15]) +@patch("psutil.cpu_count", mycpucount) +def test_nproc(monkeypatch, nproc): + monkeypatch.delenv("PYMAPDL_START_INSTANCE", False) + + if nproc and nproc > mycpucount(): + with pytest.raises(NotEnoughResources): + launch_mapdl(nproc=nproc, _debug_no_launch=True) + else: + args = launch_mapdl(nproc=nproc, _debug_no_launch=True) + assert args["nproc"] == (nproc or 2) + + @patch("os.name", "nt") @patch("psutil.cpu_count", mycpucount) def test_generate_mapdl_launch_command_windows(): From e4cc11ea631a67e920137f3ef63fcdabcd3155ca Mon Sep 17 00:00:00 2001 From: Camille <78221213+clatapie@users.noreply.github.com> Date: Wed, 23 Oct 2024 09:41:09 +0200 Subject: [PATCH 26/33] refactor: modifying ``subprocess`` calls and removing ``try except continue`` statements (#3474) * fix: update ``.gitignore`` file * fix: fixing minor vulnerabilities * fix: try/except/pass vulnerabilities * fix: ``subprocess`` calls * chore: adding changelog file 3474.maintenance.md [dependabot-skip] * ci: auto fixes from pre-commit.com hooks. for more information, see https://pre-commit.ci * fix: test * ci: auto fixes from pre-commit.com hooks. for more information, see https://pre-commit.ci * chore: adding changelog file 3474.added.md [dependabot-skip] * fix: removing pipe in shell command * ci: auto fixes from pre-commit.com hooks. for more information, see https://pre-commit.ci * fix: pre-commit * test: removing subprocess change * revert: reverting ``subprocess`` changes * fix: import error * fix: import error with minimal requirements * fix: warning issue * fix: ``_retrieve_file`` * fix: ``test_failed_download`` * ci: auto fixes from pre-commit.com hooks. for more information, see https://pre-commit.ci * fix: ``test_examples.py`` * fix: ``download_examples`` * fix: adding ``requests`` in ``minimum_requirements`` and reverting changes in ``_launch`` method * ci: auto fixes from pre-commit.com hooks. 
for more information, see https://pre-commit.ci * fix: typo * fix: removing a warning * feat: removing ``shell=True`` * fix: attemp to fix the ``PermissionError`` * test: providing higher permissions to run the exec file * test: using Python 3.12 in CICD for better understanding of the output * fix: removing `"` character * fix: adding `'` character * fix: reverting changes in subprocess * fix: checking whether other changes are correct or not * fix: `` test__is_ubuntu`` * fix: using ``executable`` in ``subprocess.Popen`` * fix: ``proc`` * fix: attempt to fix test errors * fix: fix typo * fix: ``_is_ubuntu`` * fix: testing another approach for ``is_ubuntu`` method * fix: attempt to use ``shell=False`` in ``launch_grpc`` * fix: removing empty args * test: attempt to fix ``command_parm`` * test: attempt to fix ``command_parm`` - 2 * ci: auto fixes from pre-commit.com hooks. for more information, see https://pre-commit.ci * Update src/ansys/mapdl/core/examples/downloads.py Co-authored-by: Roberto Pastor Muela <37798125+RobPasMue@users.noreply.github.com> * test: attempt to fix ``command_parm`` - 3 * fix: ``call`` in ``open_gui`` * fix: removing unused variable ``e`` * revert: reverting change for ``MAIN_PYTHON_VERSION`` * chore: adding changelog file 3474.added.md [dependabot-skip] * Apply suggestions from code review Co-authored-by: German <28149841+germa89@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: German <28149841+germa89@users.noreply.github.com> * test: fix tests * test: reverting change in ``f'"{exec_file}"'`` * fix: ``exec_file`` errors * review: applying @germa89's suggestions * Apply suggestions from code review Co-authored-by: German <28149841+germa89@users.noreply.github.com> * review: applying @germa89's suggestions * Update src/ansys/mapdl/core/mapdl_core.py Co-authored-by: German <28149841+germa89@users.noreply.github.com> * review: applying @germa89's suggestions - 2 * fux: using ``self._log.debug`` * fix: ``test_examples.py`` * fix: ``generate_mapdl_launch_command`` * fix: ``_checkout_license`` * fix: ``launch_grpc`` for Linux * Apply suggestions from code review Co-authored-by: Roberto Pastor Muela <37798125+RobPasMue@users.noreply.github.com> * review: applying @germa89 and @RobPasMue's suggestions * review: applying code reviewers suggestions * fix: using ``cwd`` arg in ``subprocess.call`` --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Roberto Pastor Muela <37798125+RobPasMue@users.noreply.github.com> Co-authored-by: German <28149841+germa89@users.noreply.github.com> --- doc/changelog.d/3474.added.md | 1 + minimum_requirements.txt | 2 +- src/ansys/mapdl/core/examples/downloads.py | 48 +++++++++++----------- src/ansys/mapdl/core/launcher.py | 26 ++++++------ src/ansys/mapdl/core/licensing.py | 3 +- src/ansys/mapdl/core/mapdl_console.py | 5 ++- src/ansys/mapdl/core/mapdl_core.py | 26 ++++++++---- src/ansys/mapdl/core/mapdl_extended.py | 8 ++-- src/ansys/mapdl/core/mapdl_grpc.py | 9 ++-- src/ansys/mapdl/core/misc.py | 5 ++- src/ansys/mapdl/core/pool.py | 16 ++++---- src/ansys/mapdl/core/xpl.py | 13 ++---- tests/test_cli.py | 13 ++++-- tests/test_examples.py | 5 ++- tests/test_launcher.py | 13 ++++-- 15 files changed, 109 insertions(+), 84 deletions(-) create mode 100644 doc/changelog.d/3474.added.md diff --git a/doc/changelog.d/3474.added.md b/doc/changelog.d/3474.added.md new file mode 100644 
index 0000000000..ea01459e8f --- /dev/null +++ b/doc/changelog.d/3474.added.md @@ -0,0 +1 @@ +refactor: modifying ``subprocess`` calls and removing ``try except continue`` statements \ No newline at end of file diff --git a/minimum_requirements.txt b/minimum_requirements.txt index 7df708dc81..ee9008ac42 100644 --- a/minimum_requirements.txt +++ b/minimum_requirements.txt @@ -3,4 +3,4 @@ importlib-metadata==8.5.0 numpy==2.1.2 platformdirs==4.3.6 psutil==6.1.0 -pyansys-tools-versioning==0.6.0 +pyansys-tools-versioning==0.6.0 \ No newline at end of file diff --git a/src/ansys/mapdl/core/examples/downloads.py b/src/ansys/mapdl/core/examples/downloads.py index e8ba2f59f6..67c793e091 100644 --- a/src/ansys/mapdl/core/examples/downloads.py +++ b/src/ansys/mapdl/core/examples/downloads.py @@ -25,13 +25,13 @@ from functools import wraps import os import shutil -import urllib.request import zipfile try: import requests _HAS_REQUESTS = True + except ModuleNotFoundError: _HAS_REQUESTS = False @@ -83,14 +83,8 @@ def _get_file_url(filename, directory=None): def _check_url_exist(url): - if not _HAS_REQUESTS: - raise ModuleNotFoundError("Examples module requires request module") - - response = requests.get(url) - if response.status_code == 200: - return [True] - else: - return [False] + response = requests.get(url, timeout=10) # 10 seconds timeout + return response.status_code == 200 @check_directory_exist(pymapdl.EXAMPLES_PATH) @@ -103,24 +97,28 @@ def _retrieve_file(url, filename, _test=False): local_path = os.path.join(pymapdl.EXAMPLES_PATH, os.path.basename(filename)) local_path_no_zip = local_path.replace(".zip", "") if os.path.isfile(local_path_no_zip) or os.path.isdir(local_path_no_zip): - return local_path_no_zip, None + return local_path_no_zip # Perform download - saved_file, resp = urllib.request.urlretrieve(url) - shutil.move(saved_file, local_path) + requested_file = requests.get(url, timeout=10) + requested_file.raise_for_status() + + with open(local_path, "wb") as f: + f.write(requested_file.content) + if get_ext(local_path) in [".zip"]: _decompress(local_path) local_path = local_path[:-4] - return local_path, resp + return local_path def _download_file(filename, directory=None, _test=False): url = _get_file_url(filename, directory) try: return _retrieve_file(url, filename, _test) - except Exception as e: # Genering exception - raise RuntimeError( - "For the reason mentioned below, retrieving the file from internet failed.\n" + except requests.exceptions.HTTPError as e: + raise requests.exceptions.HTTPError( + "Retrieving the file from internet failed.\n" "You can download this file from:\n" f"{url}\n" "\n" @@ -140,27 +138,27 @@ def download_bracket(): '/home/user/.local/share/ansys_mapdl_core/examples/bracket.iges' """ - return _download_file("bracket.iges", "geometry")[0] + return _download_file("bracket.iges", "geometry") def download_tech_demo_data(example, filename): """Download Tech Demos external data.""" example = "tech_demos/" + example - return _download_file(filename=filename, directory=example)[0] + return _download_file(filename=filename, directory=example) def download_vtk_rotor(): """Download rotor vtk file.""" - return _download_file("rotor.vtk", "geometry")[0] + return _download_file("rotor.vtk", "geometry") def _download_rotor_tech_demo_vtk(): """Download the rotor surface VTK file.""" - return _download_file("rotor2.vtk", "geometry")[0] + return _download_file("rotor2.vtk", "geometry") def download_example_data(filename, directory=None): - return _download_file(filename, 
directory=directory)[0] + return _download_file(filename, directory=directory) def download_manifold_example_data() -> dict: @@ -188,10 +186,10 @@ def download_manifold_example_data() -> dict: return { "geometry": _download_file( filename="manifold_geometry.anf", directory=files_dir - )[0], + ), "mapping_data": _download_file( filename="manifold_cht-final_temp.csv", directory=files_dir - )[0], + ), } @@ -219,6 +217,6 @@ def download_cfx_mapping_example_data() -> dict: return { "data": _download_file( filename="11_blades_mode_1_ND_0.csv", directory=files_dir - )[0], - "model": _download_file(filename="ExampleMapping.db", directory=files_dir)[0], + ), + "model": _download_file(filename="ExampleMapping.db", directory=files_dir), } diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 2e6b8bf12c..5a827e9bae 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -166,8 +166,7 @@ def _is_ubuntu() -> bool: return False proc = subprocess.Popen( - "awk -F= '/^NAME/{print $2}' /etc/os-release", - shell=True, + ["awk", "-F=", "/^NAME/{print $2}", "/etc/os-release"], stdout=subprocess.PIPE, ) if "ubuntu" in proc.stdout.read().decode().lower(): @@ -309,7 +308,7 @@ def generate_mapdl_launch_command( ram: Optional[int] = None, port: int = MAPDL_DEFAULT_PORT, additional_switches: str = "", -) -> str: +) -> list[str]: """Generate the command line to start MAPDL in gRPC mode. Parameters @@ -346,7 +345,7 @@ def generate_mapdl_launch_command( Returns ------- - str + list[str] Command """ @@ -369,10 +368,10 @@ def generate_mapdl_launch_command( # Windows will spawn a new window, special treatment if os.name == "nt": + exec_file = f'"{exec_file}"' # must start in batch mode on windows to hide APDL window tmp_inp = ".__tmp__.inp" command_parm = [ - '"%s"' % exec_file, job_sw, cpu_sw, ram_sw, @@ -388,7 +387,6 @@ def generate_mapdl_launch_command( else: # linux command_parm = [ - '"%s"' % exec_file, job_sw, cpu_sw, ram_sw, @@ -398,16 +396,19 @@ def generate_mapdl_launch_command( ] command_parm = [ - each for each in command_parm if command_parm + each for each in command_parm if each.strip() ] # cleaning empty args. 
- command = " ".join(command_parm) - LOG.debug(f"Generated command: {command}") - return command + # removing spaces in cells + command_parm = " ".join(command_parm).split(" ") + command_parm.insert(0, f"{exec_file}") + + LOG.debug(f"Generated command: {' '.join(command_parm)}") + return command_parm def launch_grpc( - cmd: str, + cmd: list[str], run_location: str = None, env_vars: Optional[Dict[str, str]] = None, ) -> subprocess.Popen: @@ -442,7 +443,7 @@ def launch_grpc( if os.name == "nt": # getting tmp file name - tmp_inp = cmd.split()[cmd.split().index("-i") + 1] + tmp_inp = cmd[cmd.index("-i") + 1] with open(os.path.join(run_location, tmp_inp), "w") as f: f.write("FINISH\r\n") LOG.debug(f"Writing temporary input file: {tmp_inp} with 'FINISH' command.") @@ -450,7 +451,6 @@ def launch_grpc( LOG.debug("MAPDL starting in background.") process = subprocess.Popen( cmd, - shell=os.name != "nt", cwd=run_location, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, diff --git a/src/ansys/mapdl/core/licensing.py b/src/ansys/mapdl/core/licensing.py index 28c68bea3b..2c82e34f38 100644 --- a/src/ansys/mapdl/core/licensing.py +++ b/src/ansys/mapdl/core/licensing.py @@ -329,11 +329,10 @@ def _checkout_license(self, lic, host=None, port=2325): tstart = time.time() process = subprocess.Popen( - f'"{ansysli_util_path}" -checkout {lic}', + [f'"{ansysli_util_path}"', "-checkout", f"{lic}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, - shell=True, ) output = process.stdout.read().decode() diff --git a/src/ansys/mapdl/core/mapdl_console.py b/src/ansys/mapdl/core/mapdl_console.py index b39fa57fea..d3e1c4f678 100644 --- a/src/ansys/mapdl/core/mapdl_console.py +++ b/src/ansys/mapdl/core/mapdl_console.py @@ -28,6 +28,7 @@ import re import time +from ansys.mapdl.core import LOG from ansys.mapdl.core.errors import MapdlExitedError, MapdlRuntimeError from ansys.mapdl.core.mapdl import MapdlBase from ansys.mapdl.core.misc import requires_package @@ -284,8 +285,8 @@ def exit(self, close_log=True, timeout=3): try: self._process.sendline("FINISH") self._process.sendline("EXIT") - except: - pass + except Exception as e: + LOG.warning(f"Unable to exit ANSYS MAPDL: {e}") if close_log: self._close_apdl_log() diff --git a/src/ansys/mapdl/core/mapdl_core.py b/src/ansys/mapdl/core/mapdl_core.py index 8fec578281..f1aa990d15 100644 --- a/src/ansys/mapdl/core/mapdl_core.py +++ b/src/ansys/mapdl/core/mapdl_core.py @@ -500,8 +500,10 @@ def directory(self) -> str: while (not self._path and i > 5) or i == 0: try: self._path = self.inquire("", "DIRECTORY") - except Exception: # pragma: no cover - pass + except Exception as e: # pragma: no cover + logger.warning( + f"Failed to get the directory due to the following error: {e}" + ) i += 1 if not self._path: # pragma: no cover time.sleep(0.1) @@ -676,8 +678,8 @@ def jobname(self) -> str: """ try: self._jobname = self.inquire("", "JOBNAME") - except Exception: - pass + except Exception as e: + logger.warning(f"Failed to get the jobname due to the following error: {e}") return self._jobname @jobname.setter @@ -1693,11 +1695,21 @@ def open_gui(self, include_result=None, inplace=None): # pragma: no cover "MAPDL GUI has been opened using 'inplace' kwarg. " f"The changes you make will overwrite the files in {run_dir}." 
) + add_sw = add_sw.split() + exec_array = [ + f"{exec_file}", + "-g", + "-j", + f"{name}", + "-np", + f"{nproc}", + *add_sw, + ] call( - f'cd "{run_dir}" && "{exec_file}" -g -j {name} -np {nproc} {add_sw}', - shell=True, + exec_array, stdout=DEVNULL, + cwd=run_dir, ) # Going back @@ -2298,7 +2310,7 @@ def __del__(self): try: # logger might be closed if self._log is not None: self._log.error("exit: %s", str(e)) - except Exception: + except ValueError: pass def _cleanup_loggers(self): diff --git a/src/ansys/mapdl/core/mapdl_extended.py b/src/ansys/mapdl/core/mapdl_extended.py index abdd55c027..0e5c859eb8 100644 --- a/src/ansys/mapdl/core/mapdl_extended.py +++ b/src/ansys/mapdl/core/mapdl_extended.py @@ -807,9 +807,11 @@ def aplot( for surf in surfs: anum = np.unique(surf["entity_num"]) - assert ( - len(anum) == 1 - ), f"The pv.Unstructured from the entity {anum[0]} contains entities from other entities {anum}" # Sanity check + if len(anum) != 1: + raise RuntimeError( + f"The pv.Unstructured from the entity {anum[0]} contains entities" + f"from other entities {anum}" # Sanity check + ) area = surf.extract_cells(surf["entity_num"] == anum) centers.append(area.center) diff --git a/src/ansys/mapdl/core/mapdl_grpc.py b/src/ansys/mapdl/core/mapdl_grpc.py index 28210b8f40..35ef630f0a 100644 --- a/src/ansys/mapdl/core/mapdl_grpc.py +++ b/src/ansys/mapdl/core/mapdl_grpc.py @@ -881,8 +881,9 @@ def _launch(self, start_parm, timeout=10): self.prep7() success = True break - except: - pass + except MapdlRuntimeError: + time.sleep(1) + warn("PyMAPDL is taking longer than expected to connect to the server.") if not success: raise MapdlConnectionError("Unable to reconnect to MAPDL") @@ -1025,7 +1026,7 @@ def _send_command_stream(self, cmd, verbose=False) -> str: return "".join(response) def _threaded_heartbeat(self): - """To be called from a thread to verify mapdl instance is alive""" + """To be called from a thread to verify MAPDL instance is alive""" self._initialised.set() while True: if self._exited: @@ -1038,7 +1039,7 @@ def _threaded_heartbeat(self): except ReferenceError: break except Exception: - continue + self._log.debug("Checking if MAPDL instance is still alive.") @protect_from(ValueError, "I/O operation on closed file.") def exit(self, save=False, force=False, **kwargs): diff --git a/src/ansys/mapdl/core/misc.py b/src/ansys/mapdl/core/misc.py index efbc53295a..62fa90aa4a 100644 --- a/src/ansys/mapdl/core/misc.py +++ b/src/ansys/mapdl/core/misc.py @@ -28,7 +28,6 @@ import os from pathlib import Path import platform -import random import socket import string import tempfile @@ -378,7 +377,9 @@ def is_float(input_string): def random_string(stringLength=10, letters=string.ascii_lowercase): """Generate a random string of fixed length""" - return "".join(random.choice(letters) for i in range(stringLength)) + import secrets + + return "".join(secrets.choice(letters) for _ in range(stringLength)) def _check_has_ansys(): diff --git a/src/ansys/mapdl/core/pool.py b/src/ansys/mapdl/core/pool.py index 8e45a424a9..c1b7f1204d 100755 --- a/src/ansys/mapdl/core/pool.py +++ b/src/ansys/mapdl/core/pool.py @@ -562,8 +562,8 @@ def run(): if not complete[0]: try: obj.exit() - except: - pass + except MapdlRuntimeError: + LOG.warning(f"Unable to delete the object {obj}") # ensure that the directory is cleaned up if obj._cleanup: @@ -572,11 +572,9 @@ def run(): if os.path.isdir(obj.directory): try: shutil.rmtree(obj.directory) - except Exception as e: + except OSError as e: LOG.warning( - "Unable to remove directory 
at %s:\n%s", - obj.directory, - str(e), + f"Unable to remove directory at {obj.directory}:\n{e}" ) obj.locked = False @@ -837,8 +835,10 @@ def threaded_exit(index, instance): self._exiting_i += 1 try: instance.exit() - except: - pass + except MapdlRuntimeError as e: + LOG.warning( + f"Unable to exit instance {index} because of the following reason:\n{str(e)}" + ) self._instances[index] = None # LOG.debug("Exited instance: %s", str(instance)) self._exiting_i -= 1 diff --git a/src/ansys/mapdl/core/xpl.py b/src/ansys/mapdl/core/xpl.py index a49d180adb..76d6086039 100644 --- a/src/ansys/mapdl/core/xpl.py +++ b/src/ansys/mapdl/core/xpl.py @@ -23,8 +23,6 @@ """Contains the ansXpl class.""" import json import pathlib -import random -import string import weakref from ansys.api.mapdl.v0 import mapdl_pb2 @@ -32,12 +30,7 @@ from .common_grpc import ANSYS_VALUE_TYPE from .errors import MapdlRuntimeError - - -def id_generator(size=6, chars=string.ascii_uppercase): - """Generate a random string using only uppercase letters.""" - return "".join(random.choice(chars) for _ in range(size)) - +from .misc import random_string MYCTYPE = { np.int32: "I", @@ -446,7 +439,7 @@ def extract(self, recordname, sets="ALL", asarray=False): if recordname.upper() != "NSL": raise ValueError("Currently, the only supported recordname is 'NSL'") - rand_name = id_generator() + rand_name = random_string(stringLength=6) self._mapdl._log.info( "Calling MAPDL to extract the %s matrix from %s", recordname, @@ -497,7 +490,7 @@ def read(self, recordname, asarray=False): """ from ansys.math.core.math import AnsMath - rand_name = id_generator() + rand_name = random_string(stringLength=6) response = self._mapdl.run(f"*XPL,READ,{recordname},{rand_name}") self._check_ignored(response) data_info = self._mapdl._data_info(rand_name) diff --git a/tests/test_cli.py b/tests/test_cli.py index 09ae9dbb40..bd3c9377b0 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -224,10 +224,17 @@ def test_convert(run_cli, tmpdir): @requires("click") def test_convert_pipe(): - cmd = """echo /prep7 | pymapdl convert """ + cmd = ["echo", "/prep7"] + cmd2 = ["pymapdl", "convert"] - out = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) - stdout = out.stdout.read().decode() + process_echo = subprocess.Popen(cmd, stdout=subprocess.PIPE) + process_pymapdl = subprocess.Popen( + cmd2, stdin=process_echo.stdout, stdout=subprocess.PIPE + ) + + process_echo.stdout.close() + + stdout = process_pymapdl.stdout.read().decode() assert "mapdl.prep7" in stdout assert "Script generated by ansys-mapdl-core version" in stdout diff --git a/tests/test_examples.py b/tests/test_examples.py index 5a1851f98d..cbe980656e 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -110,9 +110,12 @@ def test_download_example_data_true_download(): assert os.path.exists(path) +@requires("requests") def test_failed_download(running_test): + from requests.exceptions import HTTPError + filename = "non_existing_file" - with pytest.raises(RuntimeError): + with pytest.raises(HTTPError): with running_test(active=False): # To force downloading the file _download_file(filename, directory=None) diff --git a/tests/test_launcher.py b/tests/test_launcher.py index 63ea33d1d3..b0823533be 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -927,6 +927,10 @@ def test_generate_mapdl_launch_command_windows(): additional_switches=additional_switches, ) + assert isinstance(cmd, list) + assert all([isinstance(each, str) for each in cmd]) + + cmd = " ".join(cmd) assert 
f'"{exec_file}" ' in cmd assert f" -j {jobname} " in cmd assert f" -port {port} " in cmd @@ -956,8 +960,11 @@ def test_generate_mapdl_launch_command_linux(): ram=ram, additional_switches=additional_switches, ) + assert isinstance(cmd, list) + assert all([isinstance(each, str) for each in cmd]) - assert f'"{exec_file}" ' in cmd + cmd = " ".join(cmd) + assert f"{exec_file} " in cmd assert f" -j {jobname} " in cmd assert f" -port {port} " in cmd assert f" -m {ram*1024} " in cmd @@ -1108,17 +1115,17 @@ def fake_subprocess_open(*args, **kwargs): @patch("os.name", "nt") @patch("subprocess.Popen", fake_subprocess_open) def test_launch_grpc(tmpdir): - cmd = "ansys.exe -b -i my_input.inp -o my_output.inp" + cmd = "ansys.exe -b -i my_input.inp -o my_output.inp".split() run_location = str(tmpdir) kwags = launch_grpc(cmd, run_location) inp_file = os.path.join(run_location, "my_input.inp") + assert os.path.exists(inp_file) with open(inp_file, "r") as fid: assert "FINISH" in fid.read() assert cmd == kwags["cmd"] - assert not kwags["shell"] assert "TRUE" == kwags["env"].pop("ANS_CMD_NODIAG") assert not kwags["env"] assert isinstance(kwags["stdin"], type(subprocess.DEVNULL)) From 2ba2b80d75d0a1d431f0b4647b8df8967d12dc14 Mon Sep 17 00:00:00 2001 From: Camille <78221213+clatapie@users.noreply.github.com> Date: Wed, 23 Oct 2024 16:17:07 +0200 Subject: [PATCH 27/33] ci: ``ansys/actions/check-vulnerabilities`` to CI-CD (#3505) * add: ``ansys/actions/check-vulnerabilities`` action to cicd * chore: adding changelog file 3505.maintenance.md [dependabot-skip] * fix: ignoring some bandit warnings and adding reasons * Update .github/workflows/ci.yml Co-authored-by: German <28149841+germa89@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Roberto Pastor Muela <37798125+RobPasMue@users.noreply.github.com> * Update .github/workflows/ci.yml --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: German <28149841+germa89@users.noreply.github.com> Co-authored-by: Roberto Pastor Muela <37798125+RobPasMue@users.noreply.github.com> --- .github/workflows/ci.yml | 11 +++++++++++ doc/changelog.d/3505.maintenance.md | 1 + src/ansys/mapdl/core/launcher.py | 22 +++++++++++++++++----- src/ansys/mapdl/core/licensing.py | 9 +++++++-- src/ansys/mapdl/core/mapdl_core.py | 15 +++++++++++++-- src/ansys/mapdl/core/mapdl_grpc.py | 5 ++++- 6 files changed, 53 insertions(+), 10 deletions(-) create mode 100644 doc/changelog.d/3505.maintenance.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5abb95986c..14e8de1f91 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -137,6 +137,17 @@ jobs: python -c "from pyvista.plotting import system_supports_plotting; print('System support plotting ' + str(system_supports_plotting()))" + check-vulnerabilities: + name: "Check library vulnerabilities" + runs-on: ubuntu-latest + steps: + - uses: ansys/actions/check-vulnerabilities@v8 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + python-package-name: ${{ env.PACKAGE_NAME }} + dev-mode: ${{ github.ref != 'refs/heads/main' }} + docs-build: name: "Build documentation" runs-on: ubuntu-latest diff --git a/doc/changelog.d/3505.maintenance.md b/doc/changelog.d/3505.maintenance.md new file mode 100644 index 0000000000..b995717304 --- /dev/null +++ b/doc/changelog.d/3505.maintenance.md @@ -0,0 +1 @@ +ci: ``ansys/actions/check-vulnerabilities`` to CI-CD \ No newline at end of file diff 
--git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 5a827e9bae..b8a49ddaa7 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -28,7 +28,10 @@ from queue import Empty, Queue import re import socket -import subprocess + +# Subprocess is needed to start the backend. But +# the input is controlled by the library. Excluding bandit check. +import subprocess # nosec B404 import threading import time from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union @@ -161,14 +164,18 @@ def _is_ubuntu() -> bool: word "ubuntu" in it. """ + # must be running linux for this to be True if os.name != "posix": return False + # args value is controlled by the library. + # awk is not a partial path - Bandit false positive. + # Excluding bandit check. proc = subprocess.Popen( ["awk", "-F=", "/^NAME/{print $2}", "/etc/os-release"], stdout=subprocess.PIPE, - ) + ) # nosec B603 B607 if "ubuntu" in proc.stdout.read().decode().lower(): return True @@ -449,6 +456,9 @@ def launch_grpc( LOG.debug(f"Writing temporary input file: {tmp_inp} with 'FINISH' command.") LOG.debug("MAPDL starting in background.") + + # cmd is controlled by the library with generate_mapdl_launch_command. + # Excluding bandit check. process = subprocess.Popen( cmd, cwd=run_location, @@ -456,7 +466,7 @@ def launch_grpc( stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env_vars, - ) + ) # nosec B603 return process @@ -1711,10 +1721,12 @@ def _get_windows_host_ip(): def _run_ip_route(): - from subprocess import run try: - p = run(["ip", "route"], capture_output=True) + # args value is controlled by the library. + # ip is not a partial path - Bandit false positive + # Excluding bandit check. + p = subprocess.run(["ip", "route"], capture_output=True) # nosec B603 B607 except Exception: LOG.debug( "Detecting the IP address of the host Windows machine requires being able to execute the command 'ip route'." diff --git a/src/ansys/mapdl/core/licensing.py b/src/ansys/mapdl/core/licensing.py index 2c82e34f38..3ae21047b4 100644 --- a/src/ansys/mapdl/core/licensing.py +++ b/src/ansys/mapdl/core/licensing.py @@ -24,7 +24,10 @@ import os import socket -import subprocess + +# Subprocess is needed to start the backend. But +# the input is controlled by the library. Excluding bandit check. +import subprocess # nosec B404 import time from ansys.mapdl.core import _HAS_ATP, LOG @@ -328,12 +331,14 @@ def _checkout_license(self, lic, host=None, port=2325): env["ANS_FLEXLM_DISABLE_DEFLICPATH"] = "TRUE" tstart = time.time() + # ansysli_util_path is controlled by the library. + # Excluding bandit check. process = subprocess.Popen( [f'"{ansysli_util_path}"', "-checkout", f"{lic}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, - ) + ) # nosec B603 output = process.stdout.read().decode() t_elap = time.time() - tstart diff --git a/src/ansys/mapdl/core/mapdl_core.py b/src/ansys/mapdl/core/mapdl_core.py index f1aa990d15..c380cda782 100644 --- a/src/ansys/mapdl/core/mapdl_core.py +++ b/src/ansys/mapdl/core/mapdl_core.py @@ -30,7 +30,10 @@ import pathlib import re from shutil import copyfile, rmtree -from subprocess import DEVNULL, call + +# Subprocess is needed to start the backend. But +# the input is controlled by the library. Excluding bandit check. 
+from subprocess import DEVNULL, call # nosec B404 import tempfile import time from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Tuple, Union @@ -1696,6 +1699,13 @@ def open_gui(self, include_result=None, inplace=None): # pragma: no cover f"The changes you make will overwrite the files in {run_dir}." ) add_sw = add_sw.split() + + # Ensure exec_file is a file + try: + pathlib.Path(exec_file).is_file() + except FileNotFoundError: + raise FileNotFoundError("The executable file for ANSYS was not found. ") + exec_array = [ f"{exec_file}", "-g", @@ -1706,11 +1716,12 @@ def open_gui(self, include_result=None, inplace=None): # pragma: no cover *add_sw, ] + # exec_array is controlled by the library. Excluding bandit check. call( exec_array, stdout=DEVNULL, cwd=run_dir, - ) + ) # nosec B603 # Going back os.chdir(cwd) diff --git a/src/ansys/mapdl/core/mapdl_grpc.py b/src/ansys/mapdl/core/mapdl_grpc.py index 35ef630f0a..ea0aac63d8 100644 --- a/src/ansys/mapdl/core/mapdl_grpc.py +++ b/src/ansys/mapdl/core/mapdl_grpc.py @@ -31,7 +31,10 @@ import pathlib import re import shutil -from subprocess import Popen + +# Subprocess is needed to start the backend. But +# the input is controlled by the library. Excluding bandit check. +from subprocess import Popen # nosec B404 import tempfile import threading import time From c0fa797a7bbfe6a34a40f24cda08e25917608e65 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Wed, 23 Oct 2024 19:25:52 +0200 Subject: [PATCH 28/33] docs: review of documenting using pymapdl on clusters (#3466) (#3506) * docs: documenting using pymapdl on clusters (#3466) * feat: adding env vars needed for multinode * feat: adding env vars needed for multinode * feat: renaming hpc detection argument * docs: adding documentation * chore: adding changelog file 3466.documentation.md * feat: adding env vars needed for multinode * feat: renaming hpc detection argument * docs: adding documentation * chore: adding changelog file 3466.documentation.md * fix: vale issues * chore: To fix sphinx build Squashed commit of the following: commit c1d1a3ea278e6461bcc91e1c965f6e6a46d00bc3 Author: German <28149841+germa89@users.noreply.github.com> Date: Mon Oct 7 15:33:19 2024 +0200 ci: retrigger CICD commit b7b5c30a422413d203a31f5a29b7e57f93a0ab08 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Oct 7 13:31:55 2024 +0000 ci: auto fixes from pre-commit.com hooks. 
for more information, see https://pre-commit.ci commit 32a1c0203fc5101f429aafafba26a28cc06bf24c Author: Revathy Venugopal <104772255+Revathyvenugopal162@users.noreply.github.com> Date: Mon Oct 7 15:31:24 2024 +0200 fix: add suggestions Co-authored-by: German <28149841+germa89@users.noreply.github.com> commit 575a219ef8b135b234f2ec5f24a9585298845eca Merge: f2afe139f be1be2e2c Author: Revathyvenugopal162 Date: Mon Oct 7 15:09:01 2024 +0200 Merge branch 'fix/add-build-cheatsheet-as-env-varaible' of https://github.com/ansys/pymapdl into fix/add-build-cheatsheet-as-env-varaible commit f2afe139f693f4f1979506662c514692280487a9 Author: Revathyvenugopal162 Date: Mon Oct 7 15:08:58 2024 +0200 fix: precommit commit be1be2e2ca4f8736db0b180ab3d8cc6bff696412 Author: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Date: Mon Oct 7 13:07:35 2024 +0000 chore: adding changelog file 3468.fixed.md commit f052a4dba77cb586be59232d2627d7814077f094 Author: Revathyvenugopal162 Date: Mon Oct 7 15:05:56 2024 +0200 fix: add build cheatsheet as env variable within doc-build * docs: expanding a bit troubleshooting advices and small format fix * docs: fix vale * fix: nproc tests * feat: adding env vars needed for multinode * feat: renaming hpc detection argument * docs: adding documentation * chore: adding changelog file 3466.documentation.md * fix: vale issues * docs: fix vale * docs: expanding a bit troubleshooting advices and small format fix * fix: nproc tests * revert: "chore: To fix sphinx build" This reverts commit e45d2e5d4fb97359605f445f462fa4b9cf76515a. * docs: clarifying where everything is running. * docs: expanding bash example * tests: fix * docs: adding `PYMAPDL_NPROC` to env var section * docs: fix vale issue * docs: fix vale issue * fix: replacing env var name * fix: unit tests * chore: adding changelog file 3466.documentation.md [dependabot-skip] * Apply suggestions from code review Co-authored-by: Camille <78221213+clatapie@users.noreply.github.com> * docs: apply suggestions from code review made by Kathy Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * docs: adding Kathy suggestion. 
--------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: Camille <78221213+clatapie@users.noreply.github.com> Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * chore: adding changelog file 3506.documentation.md [dependabot-skip] * Update src/ansys/mapdl/core/launcher.py --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Co-authored-by: Camille <78221213+clatapie@users.noreply.github.com> Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> --- doc/changelog.d/3466.documentation.md | 2 +- doc/changelog.d/3506.documentation.md | 1 + doc/source/user_guide/hpc/pymapdl.rst | 65 +++++++++---------- doc/source/user_guide/hpc/settings.rst | 6 +- doc/source/user_guide/hpc/troubleshooting.rst | 51 +++++++-------- doc/source/user_guide/mapdl.rst | 11 ++-- src/ansys/mapdl/core/launcher.py | 12 ++-- 7 files changed, 73 insertions(+), 75 deletions(-) create mode 100644 doc/changelog.d/3506.documentation.md diff --git a/doc/changelog.d/3466.documentation.md b/doc/changelog.d/3466.documentation.md index 902767602d..bd1c0ca801 100644 --- a/doc/changelog.d/3466.documentation.md +++ b/doc/changelog.d/3466.documentation.md @@ -1 +1 @@ -feat: passing tight integration env vars to mapdl \ No newline at end of file +docs: documenting using pymapdl on clusters diff --git a/doc/changelog.d/3506.documentation.md b/doc/changelog.d/3506.documentation.md new file mode 100644 index 0000000000..1977af6976 --- /dev/null +++ b/doc/changelog.d/3506.documentation.md @@ -0,0 +1 @@ +docs: review of documenting using pymapdl on clusters (#3466) \ No newline at end of file diff --git a/doc/source/user_guide/hpc/pymapdl.rst b/doc/source/user_guide/hpc/pymapdl.rst index 6f66ec52ca..14f366b63d 100644 --- a/doc/source/user_guide/hpc/pymapdl.rst +++ b/doc/source/user_guide/hpc/pymapdl.rst @@ -2,7 +2,7 @@ .. _ref_hpc_pymapdl_job: ======================= -PyMAPDL on HPC Clusters +PyMAPDL on HPC clusters ======================= @@ -10,18 +10,16 @@ Introduction ============ PyMAPDL communicates with MAPDL using the gRPC protocol. -This protocol offers many advantages and features, for more information +This protocol offers the many advantages and features described in see :ref:`ref_project_page`. -One of these features is that it is not required to have both, -PyMAPDL and MAPDL processes, running on the same machine. -This possibility open the door to many configurations, depending -on whether you run them both or not on the HPC compute nodes. -Additionally, you might to be able interact with them (``interactive`` mode) +One of these features is that it is not required to have both +PyMAPDL and MAPDL processes running on the same machine. +This possibility opens the door to many configurations, depending +on whether or not you run them both on the HPC compute nodes. +Additionally, you might be able interact with them (``interactive`` mode) or not (``batch`` mode). -Currently, the supported configurations are: - -* :ref:`ref_pymapdl_batch_in_cluster_hpc` +For information on supported configurations, see :ref:`ref_pymapdl_batch_in_cluster_hpc`. Since v0.68.5, PyMAPDL can take advantage of the tight integration @@ -31,9 +29,9 @@ to that job. For instance, if a SLURM job has allocated 8 nodes with 4 cores each, then PyMAPDL launches an MAPDL instance which uses 32 cores spawning across those 8 nodes. 
-This behaviour can turn off if passing the environment variable -:envvar:`PYMAPDL_ON_SLURM` or passing the argument `detect_HPC=False` -to :func:`launch_mapdl() `. +This behavior can turn off if passing the :envvar:`PYMAPDL_ON_SLURM` +environment variable or passing the ``detect_HPC=False`` argument +to the :func:`launch_mapdl() ` function. .. _ref_pymapdl_batch_in_cluster_hpc: @@ -41,16 +39,16 @@ to :func:`launch_mapdl() `. Submit a PyMAPDL batch job to the cluster from the entrypoint node ================================================================== -Many HPC clusters allow their users to login in a machine using -``ssh``, ``vnc``, ``rdp``, or similar technologies and submit a job +Many HPC clusters allow their users to log into a machine using +``ssh``, ``vnc``, ``rdp``, or similar technologies and then submit a job to the cluster from there. -This entrypoint machine, sometimes known as *head node* or *entrypoint node*, +This entrypoint machine, sometimes known as the *head node* or *entrypoint node*, might be a virtual machine (VDI/VM). In such cases, once the Python virtual environment with PyMAPDL is already set and is accessible to all the compute nodes, launching a -PyMAPDL job from the entrypoint is very easy to do using ``sbatch`` command. -Using ``sbatch`` command, the PyMAPDL runs and launches an MAPDL instance in +PyMAPDL job from the entrypoint node is very easy to do using the ``sbatch`` command. +When the ``sbatch`` command is used, PyMAPDL runs and launches an MAPDL instance in the compute nodes. No changes are needed on a PyMAPDL script to run it on an SLURM cluster. @@ -61,10 +59,10 @@ First the virtual environment must be activated in the current terminal. user@entrypoint-machine:~$ export VENV_PATH=/my/path/to/the/venv user@entrypoint-machine:~$ source $VENV_PATH/bin/activate -Once the virtual environment has been activated, you can launch any Python -script if they do have the proper Python shebang (``#!/usr/bin/env python3``). +Once the virtual environment is activated, you can launch any Python +script that has the proper Python shebang (``#!/usr/bin/env python3``). -For instance, to launch the following Python script ``main.py``: +For instance, assume that you want to launch the following ``main.py`` Python script: .. code-block:: python :caption: main.py @@ -80,21 +78,21 @@ For instance, to launch the following Python script ``main.py``: mapdl.exit() -You can just run in your console: +You can run this command in your console: .. code-block:: console (venv) user@entrypoint-machine:~$ sbatch main.py -Alternatively, you can remove the shebang from the python file and use a +Alternatively, you can remove the shebang from the Python file and use a Python executable call: .. code-block:: console (venv) user@entrypoint-machine:~$ sbatch python main.py -Additionally, you can change the amount of cores used in your -job, by setting the :envvar:`PYMAPDL_NPROC` to the desired value. +Additionally, you can change the number of cores used in your +job by setting the :envvar:`PYMAPDL_NPROC` environment variable to the desired value. .. code-block:: console @@ -107,8 +105,8 @@ You can also add ``sbatch`` options to the command: (venv) user@entrypoint-machine:~$ PYMAPDL_NPROC=4 sbatch main.py -For instance, to launch a PyMAPDL job which start a four cores MAPDL instance -on a 10 CPU SLURM job, you can use: +For instance, to launch a PyMAPDL job that starts a four-core MAPDL instance +on a 10-CPU SLURM job, you can run this command: .. 
code-block:: console @@ -118,13 +116,13 @@ on a 10 CPU SLURM job, you can use: Using a submission script ------------------------- -In case you need to customize more your job, you can create a SLURM -submission script to submit a PyMAPDL job. +If you need to customize your PyMAPDL job further, you can create a SLURM +submission script for submitting it. In this case, you must create two files: - Python script with the PyMAPDL code - Bash script that activates the virtual environment and calls the - Python script. + Python script .. code-block:: python :caption: main.py @@ -156,9 +154,9 @@ In this case, you must create two files: # Set env vars export MY_ENV_VAR=VALUE - # Activating Python virtual environment + # Activate Python virtual environment source /home/user/.venv/bin/activate - # Calling Python script + # Call Python script python main.py To start the simulation, you use this code: @@ -170,7 +168,7 @@ To start the simulation, you use this code: In this case, the Python virtual environment does not need to be activated before submission since it is activated later in the script. -The expected output of the job is +The expected output of the job follows: .. code-block:: text @@ -182,3 +180,4 @@ Python script. This bash script performs tasks such as creating environment variables, moving files to different directories, and printing to ensure your configuration is correct. + diff --git a/doc/source/user_guide/hpc/settings.rst b/doc/source/user_guide/hpc/settings.rst index f4366ab6f0..6654d291a4 100644 --- a/doc/source/user_guide/hpc/settings.rst +++ b/doc/source/user_guide/hpc/settings.rst @@ -10,13 +10,13 @@ Requirements Using PyMAPDL in an HPC environment managed by SLURM scheduler has certain requirements: -* **An Ansys installation must be accessible from all the compute nodes**. +* **An Ansys installation must be accessible from all the compute nodes.** This normally implies that the ``ANSYS`` installation directory is in a shared drive or directory. Your HPC cluster administrator should provide you with the path to the ``ANSYS`` directory. * **A compatible Python installation must be accessible from all the compute - nodes**. + nodes.** For compatible Python versions, see :ref:`ref_pymapdl_installation`. Additionally, you must perform a few key steps to ensure efficient job @@ -123,6 +123,7 @@ then you can run that script using: This command might take a minute or two to complete, depending on the amount of free resources available in the cluster. + On the console, you should see this output: .. code-block:: text @@ -132,4 +133,3 @@ On the console, you should see this output: If you see an error in the output, see :ref:`ref_hpc_troubleshooting`, especially :ref:`ref_python_venv_not_accesible`. - diff --git a/doc/source/user_guide/hpc/troubleshooting.rst b/doc/source/user_guide/hpc/troubleshooting.rst index 528c00fea9..da2286431f 100644 --- a/doc/source/user_guide/hpc/troubleshooting.rst +++ b/doc/source/user_guide/hpc/troubleshooting.rst @@ -7,8 +7,8 @@ Troubleshooting Debugging jobs -------------- -- Use ``--output`` and ``--error`` directives in batch scripts to captures - standard output and error messages to specific files. +- Use ``--output`` and ``--error`` directives in batch scripts to capture + standard output and error messages to specific files: .. code-block:: bash @@ -23,7 +23,8 @@ Debugging jobs - Check SLURM logs for error messages and debugging information. - It is also good idea to print the environment variables in your bash script, using - ``printenv``. 
Additionally, you can filter them using ``grep``. + ``printenv`` *bash* command. + Additionally, you can filter its output using ``grep`` *bash* command. .. code-block:: bash @@ -41,7 +42,7 @@ Debugging jobs - Use PyMAPDL logging to printout valuable information. To activate this, see :ref:`ref_debug_pymapdl`. -- In case you need more help, visit :ref:`ref_troubleshooting`. +- If you need more help, see :ref:`ref_troubleshooting`. .. _ref_python_venv_not_accesible: @@ -50,14 +51,14 @@ Python virtual environment is not accessible -------------------------------------------- If there is an error while testing the Python installation, it might mean that the Python environment is not accessible to the compute nodes. -For example, given the following *bash* script `test.sh`: +For example, assume you have the following `test.sh` *bash* script: .. code-block:: bash source /home/user/.venv/bin/activate python -c "from ansys.mapdl import core as pymapdl; pymapdl.report()" -The following output is shown after running in the terminal: +The following output is shown after running this script in the terminal: .. code-block:: console @@ -68,18 +69,18 @@ The following output is shown after running in the terminal: File "", line 1, in ImportError: No module named ansys.mapdl -As the output shows, PyMAPDL could not be found, meaning that either: +As the output shows, PyMAPDL could not be found, indicating one of the following problems: * The virtual environment does not have PyMAPDL installed. See :ref:`ref_install_pymapdl_on_hpc`. -* Or the script did not activate properly the virtual environment +* The script did not properly activate the virtual environment (``/home/user/.venv``). -For the second reason, there could be a number of reasons. +The second problem can occur due to a number of reasons. One of them is that the system Python distribution used to create the virtual environment is not accessible from the compute nodes -due to one of these reasons: +because of one of these situations: - The virtual environment has been created in a directory that is not accessible from the nodes. In this case, your terminal might @@ -92,21 +93,20 @@ due to one of these reasons: bash: .venv/bin/activate: No such file or directory Depending on your terminal configuration, the preceding error might be - sufficient to exit the terminal process, or not. - If not, the execution continues, and the subsequent ``python`` call is - executed using the default python executable. - It is very likely that the default ``python`` executable does not have - PyMAPDL installed, hence the ``ImportError`` error showed preceding might + sufficient to exit the terminal process. If it is not, the execution continues, + and the subsequent ``python`` call is executed using the default Python executable. + It is very likely that the default Python executable does not have + PyMAPDL installed. Hence the ``ImportError`` error might appear too. - The virtual environment has been created from a Python executable that is not available to the compute nodes. Hence, the virtual environment is not activated. - For example, you might be creating the virtual environment Using + For example, you might be creating the virtual environment using Python 3.10, but only Python 3.8 is available from the compute nodes. You can test which Python executable the cluster is using by starting an interactive session in a compute node with this code to list all commands - which starts with ``python``: + that start with ``python``: .. 
code-block:: console @@ -116,12 +116,11 @@ due to one of these reasons: .. the approach to solve this comes from: https://stackoverflow.com/questions/64188693/problem-with-python-environment-and-slurm-srun-sbatch -It should be noticed the preceding approach assumes that all the nodes have similar -configuration, hence all of them should have the same Python installations +It should be noted that the preceding approach assumes that all the nodes have similar +configurations. Hence, all of them should have the same Python installations available. -It is also convenient to be aware that environment variable modules can be -used to activate Python installations. +You can also use environment variable modules to activate Python installations. For more information, see :ref:`ref_envvar_modules_on_hpc`. @@ -158,10 +157,10 @@ In certain HPC environments the possibility of installing a different Python version is limited for security reasons. In such cases, the Python distribution available in the Ansys installation can be used. -This Python distribution is a customized Python (CPython) version for Ansys -products use only. -Its use is **discouraged** except for very advanced users and special use -cases. +This Python distribution is a customized Python (CPython) version for use only by Ansys +products. +Its use is **discouraged** unless you are a very advanced user or have a special use +case. This Python distribution is in the following directory, where ``%MAPDL_VERSION%`` is the three-digit Ansys version: @@ -178,7 +177,7 @@ For example, here is the directory for Ansys 2024 R2: In Ansys 2024 R1 and later, the unified installer includes CPython 3.10. -Earlier versions include CPython 3.7 +Earlier Ansys versions include CPython 3.7 (``/commonfiles/CPython/3_7/linx64/Release/python``). Because the Ansys installation must be available to all diff --git a/doc/source/user_guide/mapdl.rst b/doc/source/user_guide/mapdl.rst index d60d8e610d..c7ba053666 100644 --- a/doc/source/user_guide/mapdl.rst +++ b/doc/source/user_guide/mapdl.rst @@ -1099,20 +1099,19 @@ Environment variables There are several PyMAPDL-specific environment variables that can be used to control the default behavior of PyMAPDL or launching MAPDL. -It should be mentioned that these environment variables do not have +These environment variables do not have priority over the arguments given in the corresponding functions. -For instance: +Consider this command: .. code-block:: console user@machine:~$ export PYMAPDL_PORT=50052 user@machine:~$ python -c "from ansys.mapdl.core import launch_mapdl; mapdl=launch_mapdl(port=60053)" -The preceding command launches an MAPDL instance on the port 60053, -because the argument ``port`` has priority over the environment -variable :envvar:`PYMAPDL_PORT`. +This command launches an MAPDL instance on port 60053 +because the ``port`` argument has priority over the :envvar:`PYMAPDL_PORT` +environment variable. The following table describes all arguments. 
-These are described in the following table: +---------------------------------------+----------------------------------------------------------------------------------+ | :envvar:`PYMAPDL_START_INSTANCE` | Override the behavior of the | diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index b8a49ddaa7..9d71268e54 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -1153,15 +1153,15 @@ def launch_mapdl( export PYMAPDL_MAPDL_VERSION=22.2 detect_HPC: bool, optional - Whether detect if PyMAPDL is running on an HPC cluster or not. Currently - only SLURM clusters are supported. By detaul, it is set to true. - This option can be bypassed if the environment variable - ``PYMAPDL_ON_SLURM`` is set to "true". For more information visit + Whether detect if PyMAPDL is running on an HPC cluster. Currently + only SLURM clusters are supported. By default, it is set to true. + This option can be bypassed if the ``PYMAPDL_ON_SLURM`` + environment variable is set to "true". For more information, see :ref:`ref_hpc_slurm`. kwargs : dict, optional - These keyword arguments are interface specific or for - development purposes. See Notes for more details. + These keyword arguments are interface-specific or for + development purposes. For more information, see Notes. set_no_abort : :class:`bool` *(Development use only)* From 279cbba476e073995d9fef0979f154532660d7d5 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Thu, 24 Oct 2024 12:26:30 +0200 Subject: [PATCH 29/33] fix: raising port busy when connecting (#3507) * fix: raising port busy when connecting * chore: adding changelog file 3507.fixed.md [dependabot-skip] --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3507.fixed.md | 1 + src/ansys/mapdl/core/launcher.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3507.fixed.md diff --git a/doc/changelog.d/3507.fixed.md b/doc/changelog.d/3507.fixed.md new file mode 100644 index 0000000000..1f4db19a11 --- /dev/null +++ b/doc/changelog.d/3507.fixed.md @@ -0,0 +1 @@ +fix: raising port busy when connecting \ No newline at end of file diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index 9d71268e54..8f0904b8fc 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -2069,7 +2069,7 @@ def get_port(port: Optional[int] = None, start_instance: Optional[bool] = None) LOG.debug(f"Port in use. Incrementing port number. port={port}") else: - if port_in_use(port): + if start_instance and port_in_use(port): proc = get_process_at_port(port) if proc: if is_ansys_process(proc): From 9343e66bab3bc83c002a4155aeadb59d6e88ee55 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 10:26:55 +0200 Subject: [PATCH 30/33] build: bump pyansys-tools-report from 0.8.0 to 0.8.1 in the testing group (#3516) * build: bump pyansys-tools-report in the testing group Bumps the testing group with 1 update: [pyansys-tools-report](https://github.com/ansys/pyansys-tools-report). 
Updates `pyansys-tools-report` from 0.8.0 to 0.8.1 - [Release notes](https://github.com/ansys/pyansys-tools-report/releases) - [Commits](https://github.com/ansys/pyansys-tools-report/compare/v0.8.0...v0.8.1) --- updated-dependencies: - dependency-name: pyansys-tools-report dependency-type: direct:production update-type: version-update:semver-patch dependency-group: testing ... Signed-off-by: dependabot[bot] * chore: adding changelog file 3516.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3516.dependencies.md | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 doc/changelog.d/3516.dependencies.md diff --git a/doc/changelog.d/3516.dependencies.md b/doc/changelog.d/3516.dependencies.md new file mode 100644 index 0000000000..147281e6e6 --- /dev/null +++ b/doc/changelog.d/3516.dependencies.md @@ -0,0 +1 @@ +build: bump pyansys-tools-report from 0.8.0 to 0.8.1 in the testing group \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 480246e92e..c9a2698c72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,7 +62,7 @@ tests = [ "autopep8==2.3.1", "matplotlib==3.9.2", "pandas==2.2.3", - "pyansys-tools-report==0.8.0", + "pyansys-tools-report==0.8.1", "pyfakefs==5.7.1", "pyiges[full]==0.3.1", "pytest-cov==5.0.0", From 0c0472cfbb7a71006889f1f1c26ddecdcd281ccd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 10:43:21 +0200 Subject: [PATCH 31/33] build: bump the core group with 2 updates (#3515) * build: bump the core group with 2 updates Bumps the core group with 2 updates: [ansys-tools-visualization-interface](https://github.com/ansys/ansys-tools-visualization-interface) and [ansys-sphinx-theme](https://github.com/ansys/ansys-sphinx-theme). Updates `ansys-tools-visualization-interface` from 0.4.5 to 0.4.7 - [Release notes](https://github.com/ansys/ansys-tools-visualization-interface/releases) - [Changelog](https://github.com/ansys/ansys-tools-visualization-interface/blob/main/CHANGELOG.md) - [Commits](https://github.com/ansys/ansys-tools-visualization-interface/compare/v0.4.5...v0.4.7) Updates `ansys-sphinx-theme` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/ansys/ansys-sphinx-theme/releases) - [Commits](https://github.com/ansys/ansys-sphinx-theme/compare/v1.1.6...v1.1.7) --- updated-dependencies: - dependency-name: ansys-tools-visualization-interface dependency-type: direct:production update-type: version-update:semver-patch dependency-group: core - dependency-name: ansys-sphinx-theme dependency-type: direct:production update-type: version-update:semver-patch dependency-group: core ... 
Signed-off-by: dependabot[bot] * chore: adding changelog file 3515.dependencies.md [dependabot-skip] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3515.dependencies.md | 1 + pyproject.toml | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 doc/changelog.d/3515.dependencies.md diff --git a/doc/changelog.d/3515.dependencies.md b/doc/changelog.d/3515.dependencies.md new file mode 100644 index 0000000000..7c5203d58d --- /dev/null +++ b/doc/changelog.d/3515.dependencies.md @@ -0,0 +1 @@ +build: bump the core group with 2 updates \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index c9a2698c72..dae4f86501 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ jupyter = [ tests = [ "ansys-dpf-core==0.10.1", - "ansys-tools-visualization-interface==0.4.5", + "ansys-tools-visualization-interface==0.4.7", "autopep8==2.3.1", "matplotlib==3.9.2", "pandas==2.2.3", @@ -76,8 +76,8 @@ tests = [ doc = [ "ansys-dpf-core==0.10.1", "ansys-mapdl-reader==0.54.1", - "ansys-tools-visualization-interface==0.4.5", - "ansys-sphinx-theme==1.1.6", + "ansys-tools-visualization-interface==0.4.7", + "ansys-sphinx-theme==1.1.7", "grpcio==1.67.0", "imageio-ffmpeg==0.5.1", "imageio==2.36.0", From 76b7cf288e52aa31190f42f6863c15b3b74b6513 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Fri, 25 Oct 2024 13:39:50 +0200 Subject: [PATCH 32/33] refactor: externalise 'report' features to module (#3511) * refactor: externalise the 'report' features to another file * chore: adding changelog file 3511.added.md [dependabot-skip] --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- doc/changelog.d/3511.added.md | 1 + src/ansys/mapdl/core/__init__.py | 3 +- src/ansys/mapdl/core/misc.py | 271 +--------------------------- src/ansys/mapdl/core/report.py | 295 +++++++++++++++++++++++++++++++ tests/test_misc.py | 81 +-------- tests/test_report.py | 104 +++++++++++ 6 files changed, 404 insertions(+), 351 deletions(-) create mode 100644 doc/changelog.d/3511.added.md create mode 100644 src/ansys/mapdl/core/report.py create mode 100644 tests/test_report.py diff --git a/doc/changelog.d/3511.added.md b/doc/changelog.d/3511.added.md new file mode 100644 index 0000000000..49f004477a --- /dev/null +++ b/doc/changelog.d/3511.added.md @@ -0,0 +1 @@ +refactor: externalise the 'report' features to another file \ No newline at end of file diff --git a/src/ansys/mapdl/core/__init__.py b/src/ansys/mapdl/core/__init__.py index dd8981461b..f014905845 100644 --- a/src/ansys/mapdl/core/__init__.py +++ b/src/ansys/mapdl/core/__init__.py @@ -115,8 +115,9 @@ from ansys.mapdl.core.information import Information from ansys.mapdl.core.mapdl_grpc import MapdlGrpc as Mapdl -from ansys.mapdl.core.misc import Report, _check_has_ansys +from ansys.mapdl.core.misc import _check_has_ansys from ansys.mapdl.core.pool import MapdlPool +from ansys.mapdl.core.report import Report ############################################################################### # Convenient imports diff --git a/src/ansys/mapdl/core/misc.py b/src/ansys/mapdl/core/misc.py index 62fa90aa4a..e909b0d6a5 100644 --- a/src/ansys/mapdl/core/misc.py +++ b/src/ansys/mapdl/core/misc.py @@ -38,32 +38,12 @@ import numpy as np from ansys.mapdl import core as pymapdl -from ansys.mapdl.core 
import _HAS_ATP, _HAS_PYANSYS_REPORT, _HAS_PYVISTA, LOG - -if _HAS_ATP: - from ansys.tools.path import get_available_ansys_installations - -if _HAS_PYANSYS_REPORT: - import ansys.tools.report as pyansys_report - +from ansys.mapdl.core import _HAS_PYVISTA, LOG # path of this module MODULE_PATH = os.path.dirname(inspect.getfile(inspect.currentframe())) -ANSYS_ENV_VARS = [ - "PYMAPDL_START_INSTANCE", - "PYMAPDL_PORT", - "PYMAPDL_IP", - "PYMAPDL_MAPDL_EXEC", - "PYMAPDL_MAPDL_VERSION", - "PYMAPDL_MAX_MESSAGE_LENGTH", - "ON_CI", - "ON_LOCAL", - "P_SCHEMA", -] - - class ROUTINES(Enum): """MAPDL routines.""" @@ -117,255 +97,6 @@ def check_valid_routine(routine): return True -class Plain_Report: - def __init__(self, core, optional=None, additional=None, **kwargs): - """ - Base class for a plain report. - - - Based on `scooby `_ package. - - Parameters - ---------- - additional : iter[str] - List of packages or package names to add to output information. - core : iter[str] - The core packages to list first. - optional : iter[str] - A list of packages to list if they are available. If not available, - no warnings or error will be thrown. - """ - - self.additional = additional - self.core = core - self.optional = optional - self.kwargs = kwargs - - if os.name == "posix": - self.core.extend(["pexpect"]) - - # Information about the GPU - bare except in case there is a rendering - # bug that the user is trying to report. - if self.kwargs.get("gpu", False) and _HAS_PYVISTA: - from pyvista import PyVistaDeprecationWarning - - try: - from pyvista.utilities.errors import ( - GPUInfo, # deprecated in pyvista 0.40.0 - ) - except (PyVistaDeprecationWarning, ImportError): - from pyvista.report import GPUInfo - - try: - self.kwargs["extra_meta"] = [(t[1], t[0]) for t in GPUInfo().get_info()] - except RuntimeError as e: # pragma: no cover - self.kwargs["extra_meta"] = ("GPU Details", f"Error: {str(e)}") - else: - self.kwargs["extra_meta"] = ("GPU Details", "None") - - def get_version(self, package): - try: - import importlib.metadata as importlib_metadata - except ModuleNotFoundError: # pragma: no cover - import importlib_metadata - - try: - return importlib_metadata.version(package.replace(".", "-")) - except importlib_metadata.PackageNotFoundError: - return "Package not found" - - def __repr__(self): - header = [ - "-" * 79, - "\n", - "PyMAPDL Software and Environment Report", - "\n", - "Packages Requirements", - "*********************", - ] - - core = ["\nCore packages", "-------------"] - core.extend( - [ - f"{each.ljust(20)}: {self.get_version(each)}" - for each in self.core - if self.get_version(each) - ] - ) - - if self.optional: - optional = ["\nOptional packages", "-----------------"] - optional.extend( - [ - f"{each.ljust(20)}: {self.get_version(each)}" - for each in self.optional - if self.get_version(each) - ] - ) - else: - optional = [""] - - if self.additional: - additional = ["\nAdditional packages", "-----------------"] - additional.extend( - [ - f"{each.ljust(20)}: {self.get_version(each)}" - for each in self.additional - if self.get_version(each) - ] - ) - else: - additional = [""] - - return "\n".join(header + core + optional + additional) + self.mapdl_info() - - def mapdl_info(self): - """Return information regarding the ansys environment and installation.""" - # this is here to avoid circular imports - - # List installed Ansys - lines = ["", "Ansys Environment Report", "-" * 79] - lines = ["\n", "Ansys Installation", "******************"] - if _HAS_ATP: - mapdl_install = 
get_available_ansys_installations() - - if not mapdl_install: - lines.append("Unable to locate any Ansys installations") - else: - lines.append("Version Location") - lines.append("------------------") - for key in sorted(mapdl_install.keys()): - lines.append(f"{abs(key)} {mapdl_install[key]}") - else: - mapdl_install = None - lines.append( - "Unable to locate any Ansys installations because 'ansys-tools-path is not installed." - ) - - install_info = "\n".join(lines) - - env_info_lines = [ - "\n\n\nAnsys Environment Variables", - "***************************", - ] - n_var = 0 - for key, value in os.environ.items(): - if "AWP" in key or "CADOE" in key or "ANSYS" in key: - env_info_lines.append(f"{key:<30} {value}") - n_var += 1 - if not n_var: - env_info_lines.append("None") - env_info = "\n".join(env_info_lines) - - return install_info + env_info - - -# Determine which type of report will be used (depending on the -# available packages) -if _HAS_PYANSYS_REPORT: - base_report_class = pyansys_report.Report -else: # pragma: no cover - base_report_class = Plain_Report - - -class Report(base_report_class): - """A class for custom scooby.Report.""" - - def __init__( - self, - additional=None, - ncol=3, - text_width=80, - sort=False, - gpu=True, - ansys_vars=ANSYS_ENV_VARS, - ansys_libs=None, - ): - """Generate a :class:`scooby.Report` instance. - - Parameters - ---------- - additional : list(ModuleType), list(str) - List of packages or package names to add to output information. - - ncol : int, optional - Number of package-columns in html table; only has effect if - ``mode='HTML'`` or ``mode='html'``. Defaults to 3. - - text_width : int, optional - The text width for non-HTML display modes - - sort : bool, optional - Alphabetically sort the packages - - gpu : bool - Gather information about the GPU. Defaults to ``True`` but if - experiencing rendering issues, pass ``False`` to safely generate - a report. - - ansys_vars : list of str, optional - List containing the Ansys environment variables to be reported. - (e.g. ["MYVAR_1", "MYVAR_2" ...]). Defaults to ``None``. Only used for - the `pyansys-tools-report` package. - - ansys_libs : dict {str : str}, optional - Dictionary containing the Ansys libraries and versions to be reported. - (e.g. {"MyLib" : "v1.2", ...}). Defaults to ``None``. Only used for - the `pyansys-tools-report` package. 
- - """ - # Mandatory packages - core = [ - "ansys.mapdl.core", - "numpy", - "platformdirs", - "scipy", - "grpc", # grpcio - "ansys.api.mapdl.v0", # ansys-api-mapdl-v0 - "ansys.mapdl.reader", # ansys-mapdl-reader - "google.protobuf", # protobuf library - "ansys-math-core", - ] - - # Optional packages - optional = [ - "matplotlib", - "pyvista", - "pyiges", - "tqdm", - "ansys-tools-visualization_interface", - "pandas", - ] - - if _HAS_PYANSYS_REPORT: - # Combine all packages into one - all_mapdl_packages = core + optional - if additional is not None: - all_mapdl_packages += additional - - # Call the pyansys_report.Report constructor - super().__init__( - additional=all_mapdl_packages, - ncol=ncol, - text_width=text_width, - sort=sort, - gpu=gpu, - ansys_vars=ansys_vars, - ansys_libs=ansys_libs, - ) - else: - # Call the PlainReport constructor - super().__init__( - additional=additional, - core=core, - optional=optional, - ncol=ncol, - text_width=text_width, - sort=sort, - gpu=gpu, - ) - - def is_float(input_string): """Returns true when a string can be converted to a float""" try: diff --git a/src/ansys/mapdl/core/report.py b/src/ansys/mapdl/core/report.py new file mode 100644 index 0000000000..451915ce80 --- /dev/null +++ b/src/ansys/mapdl/core/report.py @@ -0,0 +1,295 @@ +# Copyright (C) 2016 - 2024 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Module for report features""" +import os + +from ansys.mapdl.core import _HAS_ATP, _HAS_PYANSYS_REPORT, _HAS_PYVISTA + +if _HAS_PYANSYS_REPORT: + import ansys.tools.report as pyansys_report + +if _HAS_ATP: + from ansys.tools.path import get_available_ansys_installations + +ANSYS_ENV_VARS = [ + "PYMAPDL_START_INSTANCE", + "PYMAPDL_PORT", + "PYMAPDL_IP", + "PYMAPDL_NPROC", + "PYMAPDL_MAPDL_EXEC", + "PYMAPDL_MAPDL_VERSION", + "PYMAPDL_MAX_MESSAGE_LENGTH", + "PYMAPDL_ON_SLURM", + "ON_CI", + "ON_LOCAL", + "ON_REMOTE", + "P_SCHEMA", +] + + +class Plain_Report: + def __init__(self, core, optional=None, additional=None, **kwargs): + """ + Base class for a plain report. + + + Based on `scooby `_ package. + + Parameters + ---------- + additional : iter[str] + List of packages or package names to add to output information. + core : iter[str] + The core packages to list first. + optional : iter[str] + A list of packages to list if they are available. If not available, + no warnings or error will be thrown. 
+ """ + + self.additional = additional + self.core = core + self.optional = optional + self.kwargs = kwargs + + if os.name == "posix": + self.core.extend(["pexpect"]) + + # Information about the GPU - bare except in case there is a rendering + # bug that the user is trying to report. + if self.kwargs.get("gpu", False) and _HAS_PYVISTA: + + try: + from pyvista.report import GPUInfo + except ImportError: + from pyvista.utilities.errors import ( + GPUInfo, # deprecated in pyvista 0.40.0 + ) + + try: + self.kwargs["extra_meta"] = [(t[1], t[0]) for t in GPUInfo().get_info()] + except RuntimeError as e: # pragma: no cover + self.kwargs["extra_meta"] = ("GPU Details", f"Error: {str(e)}") + else: + self.kwargs["extra_meta"] = ("GPU Details", "None") + + def get_version(self, package): + try: + import importlib.metadata as importlib_metadata + except ModuleNotFoundError: # pragma: no cover + import importlib_metadata + + try: + return importlib_metadata.version(package.replace(".", "-")) + except importlib_metadata.PackageNotFoundError: + return "Package not found" + + def __repr__(self): + header = [ + "-" * 79, + "\n", + "PyMAPDL Software and Environment Report", + "\n", + "Packages Requirements", + "*********************", + ] + + core = ["\nCore packages", "-------------"] + core.extend( + [ + f"{each.ljust(20)}: {self.get_version(each)}" + for each in self.core + if self.get_version(each) + ] + ) + + if self.optional: + optional = ["\nOptional packages", "-----------------"] + optional.extend( + [ + f"{each.ljust(20)}: {self.get_version(each)}" + for each in self.optional + if self.get_version(each) + ] + ) + else: + optional = [""] + + if self.additional: + additional = ["\nAdditional packages", "-----------------"] + additional.extend( + [ + f"{each.ljust(20)}: {self.get_version(each)}" + for each in self.additional + if self.get_version(each) + ] + ) + else: + additional = [""] + + return "\n".join(header + core + optional + additional) + self.mapdl_info() + + def mapdl_info(self): + """Return information regarding the ansys environment and installation.""" + # this is here to avoid circular imports + + # List installed Ansys + lines = ["", "Ansys Environment Report", "-" * 79] + lines = ["\n", "Ansys Installation", "******************"] + if _HAS_ATP: + mapdl_install = get_available_ansys_installations() + + if not mapdl_install: + lines.append("Unable to locate any Ansys installations") + else: + lines.append("Version Location") + lines.append("------------------") + for key in sorted(mapdl_install.keys()): + lines.append(f"{abs(key)} {mapdl_install[key]}") + else: + mapdl_install = None + lines.append( + "Unable to locate any Ansys installations because 'ansys-tools-path is not installed." 
+ ) + + install_info = "\n".join(lines) + + env_info_lines = [ + "\n\n\nAnsys Environment Variables", + "***************************", + ] + n_var = 0 + for key, value in os.environ.items(): + if "AWP" in key or "CADOE" in key or "ANSYS" in key: + env_info_lines.append(f"{key:<30} {value}") + n_var += 1 + if not n_var: + env_info_lines.append("None") + env_info = "\n".join(env_info_lines) + + return install_info + env_info + + +# Determine which type of report will be used (depending on the +# available packages) +if _HAS_PYANSYS_REPORT: + base_report_class = pyansys_report.Report +else: # pragma: no cover + base_report_class = Plain_Report + + +class Report(base_report_class): + """A class for custom scooby.Report.""" + + def __init__( + self, + additional=None, + ncol=3, + text_width=80, + sort=False, + gpu=True, + ansys_vars=ANSYS_ENV_VARS, + ansys_libs=None, + ): + """Generate a :class:`scooby.Report` instance. + + Parameters + ---------- + additional : list(ModuleType), list(str) + List of packages or package names to add to output information. + + ncol : int, optional + Number of package-columns in html table; only has effect if + ``mode='HTML'`` or ``mode='html'``. Defaults to 3. + + text_width : int, optional + The text width for non-HTML display modes + + sort : bool, optional + Alphabetically sort the packages + + gpu : bool + Gather information about the GPU. Defaults to ``True`` but if + experiencing rendering issues, pass ``False`` to safely generate + a report. + + ansys_vars : list of str, optional + List containing the Ansys environment variables to be reported. + (e.g. ["MYVAR_1", "MYVAR_2" ...]). Defaults to ``None``. Only used for + the `pyansys-tools-report` package. + + ansys_libs : dict {str : str}, optional + Dictionary containing the Ansys libraries and versions to be reported. + (e.g. {"MyLib" : "v1.2", ...}). Defaults to ``None``. Only used for + the `pyansys-tools-report` package. 
+ + """ + # Mandatory packages + core = [ + "ansys.mapdl.core", + "numpy", + "platformdirs", + "scipy", + "grpc", # grpcio + "ansys.api.mapdl.v0", # ansys-api-mapdl-v0 + "ansys.mapdl.reader", # ansys-mapdl-reader + "google.protobuf", # protobuf library + "ansys-math-core", + ] + + # Optional packages + optional = [ + "matplotlib", + "pyvista", + "pyiges", + "tqdm", + "ansys-tools-visualization_interface", + "pandas", + ] + + if _HAS_PYANSYS_REPORT: + # Combine all packages into one + all_mapdl_packages = core + optional + if additional is not None: + all_mapdl_packages += additional + + # Call the pyansys_report.Report constructor + super().__init__( + additional=all_mapdl_packages, + ncol=ncol, + text_width=text_width, + sort=sort, + gpu=gpu, + ansys_vars=ansys_vars, + ansys_libs=ansys_libs, + ) + else: + # Call the PlainReport constructor + super().__init__( + additional=additional, + core=core, + optional=optional, + ncol=ncol, + text_width=text_width, + sort=sort, + gpu=gpu, + ) diff --git a/tests/test_misc.py b/tests/test_misc.py index d9fc88681a..95266c70ca 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -27,12 +27,6 @@ import numpy as np import pytest -from conftest import has_dependency, requires - -if has_dependency("pyvista"): - from pyvista.plotting import system_supports_plotting - -from ansys.mapdl import core as pymapdl from ansys.mapdl.core.misc import ( check_valid_ip, check_valid_port, @@ -44,18 +38,7 @@ requires_package, run_as_prep7, ) - - -@requires("pyvista") -def test_report(): - report = pymapdl.Report( - additional=["matplotlib", "pyvista", "pyiges", "tqdm"], - gpu=system_supports_plotting(), - ) - assert "PyAnsys Software and Environment Report" in str(report) - - # Check that when adding additional (repeated) packages, they appear only once - assert str(report).count("pyvista") == 1 +from conftest import requires @pytest.mark.parametrize( @@ -234,68 +217,6 @@ def test_load_file_local(mapdl, tmpdir, file_): assert file_ not in mapdl.list_files() -def test_plain_report(): - from ansys.mapdl.core.misc import Plain_Report - - core = ["numpy", "ansys.mapdl.reader"] - optional = ["pyvista", "tqdm"] - additional = ["scipy", "ger"] - - report = Plain_Report(core=core, optional=optional, additional=additional, gpu=True) - rep_str = report.__repr__() - - for each in core + optional + additional: - assert each in rep_str - - # There should be only one package not found ("ger") - assert "Package not found" in rep_str - not_found_packages = 1 - - # Plus the not additional packages - if not has_dependency("pyvista"): - not_found_packages += 1 - if not has_dependency("tqdm"): - not_found_packages += 1 - if not has_dependency("ansys.mapdl.reader"): - not_found_packages += 1 - if not has_dependency("scipy"): - not_found_packages += 1 - if not has_dependency("pexpect"): - not_found_packages += 1 - - _rep_str = rep_str.replace("Package not found", "", not_found_packages) - assert "Package not found" not in _rep_str - - assert "\n" in rep_str - assert len(rep_str.splitlines()) > 3 - - assert "Core packages" in rep_str - assert "Optional packages" in rep_str - assert "Additional packages" in rep_str - - # Plain report should not represent GPU details evenif asked for - assert "GPU Details" not in rep_str - - -def test_plain_report_no_options(): - from ansys.mapdl.core.misc import Plain_Report - - core = ["numpy", "ansys.mapdl.reader"] - - report = Plain_Report(core=core) - rep_str = report.__repr__() - - for each in core: - assert each in rep_str - - assert "\n" in rep_str 
- assert len(rep_str.splitlines()) > 3 - - assert "Core packages" in rep_str - assert "Optional packages" not in rep_str - assert "Additional packages" not in rep_str - - def test_requires_package_decorator(): class myClass: @requires_package("numpy") diff --git a/tests/test_report.py b/tests/test_report.py new file mode 100644 index 0000000000..4857dbf345 --- /dev/null +++ b/tests/test_report.py @@ -0,0 +1,104 @@ +# Copyright (C) 2016 - 2024 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Test report features""" + +from conftest import has_dependency, requires + +if has_dependency("pyvista"): + from pyvista.plotting import system_supports_plotting + +from ansys.mapdl import core as pymapdl + + +@requires("pyvista") +def test_report(): + report = pymapdl.Report( + additional=["matplotlib", "pyvista", "pyiges", "tqdm"], + gpu=system_supports_plotting(), + ) + assert "PyAnsys Software and Environment Report" in str(report) + + # Check that when adding additional (repeated) packages, they appear only once + assert str(report).count("pyvista") == 1 + + +def test_plain_report(): + from ansys.mapdl.core.report import Plain_Report + + core = ["numpy", "ansys.mapdl.reader"] + optional = ["pyvista", "tqdm"] + additional = ["scipy", "ger"] + + report = Plain_Report(core=core, optional=optional, additional=additional, gpu=True) + rep_str = report.__repr__() + + for each in core + optional + additional: + assert each in rep_str + + # There should be only one package not found ("ger") + assert "Package not found" in rep_str + not_found_packages = 1 + + # Plus the not additional packages + if not has_dependency("pyvista"): + not_found_packages += 1 + if not has_dependency("tqdm"): + not_found_packages += 1 + if not has_dependency("ansys.mapdl.reader"): + not_found_packages += 1 + if not has_dependency("scipy"): + not_found_packages += 1 + if not has_dependency("pexpect"): + not_found_packages += 1 + + _rep_str = rep_str.replace("Package not found", "", not_found_packages) + assert "Package not found" not in _rep_str + + assert "\n" in rep_str + assert len(rep_str.splitlines()) > 3 + + assert "Core packages" in rep_str + assert "Optional packages" in rep_str + assert "Additional packages" in rep_str + + # Plain report should not represent GPU details evenif asked for + assert "GPU Details" not in rep_str + + +def test_plain_report_no_options(): + from ansys.mapdl.core.report import Plain_Report + + core = ["numpy", 
"ansys.mapdl.reader"] + + report = Plain_Report(core=core) + rep_str = report.__repr__() + + for each in core: + assert each in rep_str + + assert "\n" in rep_str + assert len(rep_str.splitlines()) > 3 + + assert "Core packages" in rep_str + assert "Optional packages" not in rep_str + assert "Additional packages" not in rep_str From 88b8f14184e7124297e24bafb7fd4211a0e89313 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Fri, 25 Oct 2024 14:10:39 +0200 Subject: [PATCH 33/33] test: check all commands are submitted (#3501) * test: increasing testing by testing all MAPDL commands * test: test command name in output. * refactor: parse module and adding new functions to externalise to this module command output. * refactor: externalising parsing to parse module * fix: cmwrite command * fix: small fix * chore: improve pytest default configuration * chore: adding changelog file 3501.added.md [dependabot-skip] * refactor: removing unused variable * chore: adding changelog file 3501.added.md [dependabot-skip] * test: including `_commands` in coverage * tests: including tests * fix: docstring --------- Co-authored-by: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> --- codecov.yml | 1 - doc/changelog.d/3501.added.md | 1 + pyproject.toml | 2 +- .../_commands/apdl/parameter_definition.py | 4 +- .../core/_commands/database/components.py | 186 ++---------------- src/ansys/mapdl/core/_commands/parse.py | 58 ++++-- .../mapdl/core/_commands/post1_/special.py | 2 +- .../core/_commands/preproc/element_type.py | 6 +- .../mapdl/core/_commands/preproc/elements.py | 4 +- .../mapdl/core/_commands/preproc/keypoints.py | 29 +-- .../_commands/preproc/material_data_tables.py | 7 +- .../mapdl/core/_commands/preproc/nodes.py | 8 +- .../core/_commands/preproc/special_purpose.py | 2 +- .../core/_commands/session/run_controls.py | 2 +- .../_commands/solution/analysis_options.py | 2 +- src/ansys/mapdl/core/mapdl_extended.py | 8 +- src/ansys/mapdl/core/mapdl_grpc.py | 4 +- tests/test_commands.py | 85 ++++++++ 18 files changed, 178 insertions(+), 233 deletions(-) create mode 100644 doc/changelog.d/3501.added.md diff --git a/codecov.yml b/codecov.yml index a5f34c0a84..5185040ae4 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,5 +1,4 @@ ignore: - - "src/ansys/mapdl/core/_commands" - "src/ansys/mapdl/core/jupyter.py" - "src/ansys/mapdl/core/mapdl_console.py" - "src/ansys/mapdl/core/mapdl_inprocess.py" diff --git a/doc/changelog.d/3501.added.md b/doc/changelog.d/3501.added.md new file mode 100644 index 0000000000..31292e041b --- /dev/null +++ b/doc/changelog.d/3501.added.md @@ -0,0 +1 @@ +test: check all commands are submitted \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index dae4f86501..ea085cd601 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -120,7 +120,7 @@ pymapdl_convert_script = "ansys.mapdl.core.cli:old_pymapdl_convert_script_entry_ pymapdl = "ansys.mapdl.core.cli:main" [tool.pytest.ini_options] -addopts = "-ra -vvv --maxfail=10" +addopts = "-rxXsa -vvv --maxfail=10" junit_family = "legacy" filterwarnings = [ "ignore::FutureWarning", diff --git a/src/ansys/mapdl/core/_commands/apdl/parameter_definition.py b/src/ansys/mapdl/core/_commands/apdl/parameter_definition.py index 1a2e6a11a4..75915afe8e 100644 --- a/src/ansys/mapdl/core/_commands/apdl/parameter_definition.py +++ b/src/ansys/mapdl/core/_commands/apdl/parameter_definition.py @@ -336,7 +336,7 @@ def get( command = 
f"*GET,{par},{entity},{entnum},{item1},{it1num},{item2},{it2num}" return self.run(command, **kwargs) - def inquire(self, strarray="", func="", arg1="", arg2=""): + def inquire(self, strarray="", func="", arg1="", arg2="", **kwargs): """Returns system information. By default, with no arguments, it returns the working directory. @@ -489,7 +489,7 @@ def inquire(self, strarray="", func="", arg1="", arg2=""): >>> mapdl.inquire('', 'RSTFILE') 'file.rst' """ - return self.run(f"/INQUIRE,{strarray},{func},{arg1},{arg2}") + return self.run(f"/INQUIRE,{strarray},{func},{arg1},{arg2}", **kwargs) def parres(self, lab="", fname="", ext="", **kwargs): """Reads parameters from a file. diff --git a/src/ansys/mapdl/core/_commands/database/components.py b/src/ansys/mapdl/core/_commands/database/components.py index 9911aff565..b826947cec 100644 --- a/src/ansys/mapdl/core/_commands/database/components.py +++ b/src/ansys/mapdl/core/_commands/database/components.py @@ -495,185 +495,35 @@ def cmsel( def cmwrite( self, - option: str = "", fname: str = "", ext: str = "", - fnamei: str = "", - exti: str = "", fmat: str = "", **kwargs: Dict[Any, Any], - ) -> None: + ) -> str: """Writes node and element components and assemblies to a file. APDL Command: CMWRITE Parameters ---------- - option - Selects which data to write: + Fname + File name and directory path (248 characters maximum, including the + characters needed for the directory path). An unspecified directory + path defaults to the working directory; in this case, you can use + all 248 characters for the file name. + The file name defaults to ``Jobname``. + Ext + Filename extension (eight-character maximum). + The extension defaults to ``CM`` if ``fname`` is blank. - ALL - Write all appropriate geometry, material property, - load, and component data (default). Two files will - be produced. ``Fname.Ext`` will contain all data items - mentioned in "Notes", except the solid model - data. Fnamei.Exti will contain the solid model - geometry and solid model loads data in the form of - IGES commands. This option is not valid when - CDOPT,ANF is active. - - COMB - Write all data mentioned, but to a single file, - ``Fname.Ext``. Solid model geometry data will be - written in either IGES or ANF format as specified - in the CDOPT command, followed by the remainder of - the data in the form of ANSYS commands. More - information on these (IGES/ANF) file formats is - provided in "Notes". - - DB - Write all database information except the solid model - and solid model loads to ``Fname.Ext`` in the form of - ANSYS commands. This option is not valid when - CDOPT,ANF is active. - - SOLID - Write only the solid model geometry and solid - model load data. This output will be in IGES or - ANF format, as specified in the CDOPT - command. More information on these (IGES/ANF) file - formats is provided in "Notes". - - GEOM - Write only element and nodal geometry data. Neither - solid model geometry nor element attribute data - will be written. One file, ``Fname.Ext``, will be - produced. Use CDREAD,DB to read in a file written - with this option. Element types [ET] compatible - with the connectivity of the elements on the file - must first be defined before reading the file in - with CDREAD,DB. - - CM - Write only node and element component and geometry - data to ``Fname.Ext``. - - MAT - Write only material property data (both linear and - nonlinear) to ``Fname.Ext`` . - - LOAD - Write only loads for current load step to - ``Fname.Ext``. 
- - SECT - Write only section data to ``Fname.Ext``. Pretension - sections are not included. - - - fname - File name and directory path (248 characters maximum, - including the characters needed for the directory path). - An unspecified directory path defaults to the working - directory; in this case, you can use all 248 characters - for the file name. - - ext - Filename extension (eight-character maximum). The - extension defaults to CDB if Fname is blank. - - fnamei - Name of the IGES file and its directory path (248 - characters maximum, including directory). If you do not - specify a directory path, it will default to your working - directory and you can use all 248 characters for the file - name. - - The file name defaults to Fname. Used only if - Option = ALL or SOLID. Previous data on this file, if any, - is overwritten. - - Exti - Filename extension (eight-character maximum). The - extension defaults to IGES in all cases, except when - CDOPT,ANF is active and CDWRITE, Option = SOLID. In this - case Exti = ANF. - - fmat - Format of the output file (defaults to BLOCKED). - - BLOCKED - Blocked format. This format allows faster - reading of the output file. The time savings is - most significant when BLOCKED is used to read - .cdb files associated with very large models. - - UNBLOCKED - Unblocked format. + Fmat + Format of the output file (defaults to ``BLOCKED``). + + * ``BLOCKED``: Blocked format. This format allows faster + reading of the file. + + * ``UNBLOCKED``: Unblocked format. - Notes - ----- - Load data includes the current load step only. Loads applied - to the solid model (if any) are automatically transferred to - the finite element model when this command is issued. ``CDWRITE`` - writes out solid model loads for meshed models only. If the - model is not meshed, the solid model loads cannot be - saved. Component data include component definitions, but not - assembly definitions. Appropriate ``NUMOFF`` commands are included - at the beginning of the file; this is to avoid overlap of an - existing database when the file is read in. - - Solution control commands are typically not written to the - file unless you specifically change a default solution - setting. - - ``CDWRITE`` does not support the ``GSBDATA`` and ``GSGDATA`` commands, and - these commands are not written to the file. - - The data may be reread (on a different machine, for example) - with the ``CDREAD`` command. Caution: When the file is read in, - the ``NUMOFF,MAT`` command may cause a mismatch between material - definitions and material numbers referenced by certain loads - and element real constants. See ``NUMOFF`` for details. Also, be - aware that the files created by the ``CDWRITE`` command explicitly - set the active coordinate system to Cartesian (CSYS,0). - - You should generally use the blocked format (Fmat = BLOCKED) - when writing out model data with ``CDWRITE``. This is a compressed - data format that greatly reduces the time required to read - large models through the CDREAD command. The blocked and - unblocked formats are described in Chapter 3 of the Guide to - Interfacing with ANSYS. - - If you use ``CDWRITE`` in any of the derived products (ANSYS - Mechanical Pro, ANSYS Mechanical Premium), then before reading - the file, you must edit the Jobname.cdb file to remove - commands that are not available in the respective component - product. - - The ``CDWRITE`` command writes PART information for any ANSYS - LS-DYNA input file to the Jobname.cdb file via the EDPREAD - command. 
(EDPREAD is not a documented command; it is written - only when the ``CDWRITE`` command is issued.) The PART information - can be automatically read in via the CDREAD command; however, - if more than one Jobname.cdb file is read, the PART list from - the last Jobname.cdb file overwrites the existing PART list of - the total model. This behavior affects all PART-related - commands contained in the Jobname.cdb file. You can join - models, but not PART-related inputs, which you must modify - using the newly-created PART numbers. In limited cases, an - update of the PART list (EDWRITE,PUPDATE) is possible; doing - so requires that no used combination of MAT/TYPE/REAL appears - more than once in the list. - - The ``CDWRITE`` command does not support (for beam meshing) any - line operation that relies on solid model associativity. For - example, meshing the areas adjacent to the meshed line, - plotting the line that contains the orientation nodes, or - clearing the mesh from the line that contains orientation - nodes may not work as expected. For more information about - beam meshing, see Meshing Your Solid Model in the Modeling and - Meshing Guide. """ - command = f"CDWRITE,{option},{fname},{ext},,{fnamei},{exti},{fmat}" + command = f"CMWRITE,{fname},{ext},,,{fmat}" return self.run(command, **kwargs) diff --git a/src/ansys/mapdl/core/_commands/parse.py b/src/ansys/mapdl/core/_commands/parse.py index 87f4ab43b7..a1a4820dcc 100644 --- a/src/ansys/mapdl/core/_commands/parse.py +++ b/src/ansys/mapdl/core/_commands/parse.py @@ -39,14 +39,14 @@ NUM_PATTERN = re.compile(NUMERIC_CONST_PATTERN, re.VERBOSE) -def parse_kdist(msg): +def parse_kdist(msg: Optional[str] = None) -> Optional[int]: """Parse the keypoint value from a keypoint message""" finds = re.findall(NUM_PATTERN, msg)[-4:] if len(finds) == 4: return [float(val) for val in finds] -def parse_et(msg: Optional[str]) -> Optional[int]: +def parse_et(msg: Optional[str] = None) -> Optional[int]: """Parse local element type number definition message and return element type number. 
""" @@ -56,7 +56,7 @@ def parse_et(msg: Optional[str]) -> Optional[int]: return int(res.group(2)) -def parse_e(msg: Optional[str]) -> Optional[int]: +def parse_e(msg: Optional[str] = None) -> Optional[int]: """Parse create element message and return element number.""" if msg: res = re.search(r"(ELEMENT\s*)([0-9]+)", msg) @@ -64,7 +64,19 @@ def parse_e(msg: Optional[str]) -> Optional[int]: return int(res.group(2)) -def parse_kpoint(msg): +def parse_k(msg: Optional[str] = None) -> Optional[int]: + """Parse output from ``K`` command""" + if msg: + if not re.search(r"KEYPOINT NUMBER", msg): + res = re.search(r"(KEYPOINT\s*)([0-9]+)", msg) + else: + res = re.search(r"(KEYPOINT NUMBER =\s*)([0-9]+)", msg) + + if res: + return int(res.group(2)) + + +def parse_kpoint(msg: Optional[str] = None) -> Optional[int]: """Parse create keypoint message and return keypoint number.""" if msg: res = re.search(r"kpoint=\s+(\d+)\s+", msg) @@ -72,7 +84,7 @@ def parse_kpoint(msg): return int(res.group(1)) -def parse_output_areas(msg): +def parse_output_areas(msg: Optional[str] = None) -> Optional[int]: """Parse create area message and return area number.""" if msg: res = re.search(r"(OUTPUT AREAS =\s*)([0-9]+)", msg) @@ -83,7 +95,7 @@ def parse_output_areas(msg): return int(res.group(2)) -def parse_a(msg): +def parse_a(msg: Optional[str] = None) -> Optional[int]: """Parse create area message and return area number.""" if msg: res = re.search(r"(AREA NUMBER =\s*)([0-9]+)", msg) @@ -91,7 +103,7 @@ def parse_a(msg): return int(res.group(2)) -def parse_line_no(msg): +def parse_line_no(msg: Optional[str] = None) -> Optional[int]: """Parse create line message and return line number.""" if msg: res = re.search(r"LINE NO[.]=\s+(\d+)", msg) @@ -99,14 +111,14 @@ def parse_line_no(msg): return int(res.group(1)) -def parse_line_nos(msg): +def parse_line_nos(msg: Optional[str] = None) -> Optional[int]: if msg: matches = re.findall(r"LINE NO[.]=\s*(\d*)", msg) if matches: return [int(match) for match in matches] -def parse_v(msg): +def parse_v(msg: Optional[str] = None) -> Optional[int]: """Parse volume message and return volume number""" if msg: res = re.search(r"(VOLUME NUMBER =\s*)([0-9]+)", msg) @@ -114,7 +126,7 @@ def parse_v(msg): return int(res.group(2)) -def parse_output_volume_area(msg): +def parse_output_volume_area(msg: Optional[str] = None) -> Optional[int]: """Parse create area message and return area or volume number""" if msg: res = re.search(r"OUTPUT (AREA|VOLUME|AREAS) =\s*([0-9]+)", msg) @@ -122,8 +134,32 @@ def parse_output_volume_area(msg): return int(res.group(2)) -def parse_ndist(msg): +def parse_n(msg: Optional[str] = None) -> Optional[int]: + """Parse output of ``N``""" + if msg: + res = re.search(r"(NODE\s*)([0-9]+)", msg) + if res is not None: + return int(res.group(2)) + + +def parse_ndist(msg: Optional[str] = None) -> Optional[int]: """Parse the node value from a node message""" finds = re.findall(NUM_PATTERN, msg)[-4:] if len(finds) == 4: return [float(val) for val in finds] + + +def parse_kl(msg: Optional[str] = None) -> Optional[int]: + """Parse the output of ``KL``.""" + if msg: + res = re.search(r"KEYPOINT\s+(\d+)\s+", msg) + if res is not None: + return int(res.group(1)) + + +def parse_knode(msg: Optional[str] = None) -> Optional[int]: + """Parse the output of ``KNODE``.""" + if msg: + res = re.search(r"KEYPOINT NUMBER =\s+(\d+)", msg) + if res is not None: + return int(res.group(1)) diff --git a/src/ansys/mapdl/core/_commands/post1_/special.py 
b/src/ansys/mapdl/core/_commands/post1_/special.py index 391c02a9cb..798bc5751e 100644 --- a/src/ansys/mapdl/core/_commands/post1_/special.py +++ b/src/ansys/mapdl/core/_commands/post1_/special.py @@ -1911,7 +1911,7 @@ def prcamp( Distributed ANSYS Restriction: This command is not supported in Distributed ANSYS. """ - command = f"PRCAMP,{option},{slope},{unit},{freqb},{cname},{stabval},{keyallfreq},{keynegfreq}" + command = f"PRCAMP,{option},{slope},{unit},{freqb},{cname},{stabval},{keyallfreq},{keynegfreq},{keywhirl}" return self.run(command, **kwargs) def prfar( diff --git a/src/ansys/mapdl/core/_commands/preproc/element_type.py b/src/ansys/mapdl/core/_commands/preproc/element_type.py index 9e1c97b535..bfe21c2d46 100644 --- a/src/ansys/mapdl/core/_commands/preproc/element_type.py +++ b/src/ansys/mapdl/core/_commands/preproc/element_type.py @@ -25,7 +25,7 @@ """ from typing import Optional, Union -from ansys.mapdl.core._commands.parse import parse_et +from ansys.mapdl.core._commands import parse from ansys.mapdl.core.mapdl_types import MapdlInt @@ -272,9 +272,9 @@ def et( """ command = ( - f"ET,{itype},{ename},{kop1},{kop2},{kop3},{kop4}," f"{kop5},{kop6},{inopr}" + f"ET,{itype},{ename},{kop1},{kop2},{kop3},{kop4},{kop5},{kop6},{inopr}" ) - return parse_et(self.run(command, **kwargs)) + return parse.parse_et(self.run(command, **kwargs)) def etchg(self, cnv: str = "", **kwargs) -> Optional[str]: """Changes element types to their corresponding types. diff --git a/src/ansys/mapdl/core/_commands/preproc/elements.py b/src/ansys/mapdl/core/_commands/preproc/elements.py index ac5358060e..56c8742608 100644 --- a/src/ansys/mapdl/core/_commands/preproc/elements.py +++ b/src/ansys/mapdl/core/_commands/preproc/elements.py @@ -22,7 +22,7 @@ from typing import Optional, Union -from ansys.mapdl.core._commands.parse import parse_e +from ansys.mapdl.core._commands import parse from ansys.mapdl.core.mapdl_types import MapdlFloat, MapdlInt @@ -141,7 +141,7 @@ def e( """ command = f"E,{i},{j},{k},{l},{m},{n},{o},{p}" - return parse_e(self.run(command, **kwargs)) + return parse.parse_e(self.run(command, **kwargs)) def ecpchg(self, **kwargs): """Optimizes degree-of-freedom usage in a coupled acoustic model. diff --git a/src/ansys/mapdl/core/_commands/preproc/keypoints.py b/src/ansys/mapdl/core/_commands/preproc/keypoints.py index a68e7192d3..37ce6b2a7a 100644 --- a/src/ansys/mapdl/core/_commands/preproc/keypoints.py +++ b/src/ansys/mapdl/core/_commands/preproc/keypoints.py @@ -20,8 +20,6 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -import re - from ansys.mapdl.core._commands import parse @@ -70,16 +68,7 @@ def k(self, npt="", x="", y="", z="", **kwargs) -> int: """ command = f"K,{npt},{x},{y},{z}" - msg = self.run(command, **kwargs) - - if msg: - if not re.search(r"KEYPOINT NUMBER", msg): - res = re.search(r"(KEYPOINT\s*)([0-9]+)", msg) - else: - res = re.search(r"(KEYPOINT NUMBER =\s*)([0-9]+)", msg) - - if res: - return int(res.group(2)) + return parse.parse_k(self.run(command, **kwargs)) def kbetw(self, kp1="", kp2="", kpnew="", type_="", value="", **kwargs) -> int: """Creates a keypoint between two existing keypoints. 
@@ -418,11 +407,8 @@ def kl(self, nl1="", ratio="", nk1="", **kwargs) -> int: 1 """ - msg = self.run(f"KL,{nl1},{ratio},{nk1}", **kwargs) - if msg: - res = re.search(r"KEYPOINT\s+(\d+)\s+", msg) - if res is not None: - return int(res.group(1)) + cmd = f"KL,{nl1},{ratio},{nk1}" + return parse.parse_kl(self.run(cmd, **kwargs)) def klist(self, np1="", np2="", ninc="", lab="", **kwargs): """Lists the defined keypoints or hard points. @@ -565,7 +551,7 @@ def kmove( command = f"KMOVE,{npt},{kc1},{x1},{y1},{z1},{kc2},{x2},{y2},{z2}" return self.run(command, **kwargs) - def knode(self, npt="", node="", **kwargs) -> int: + def knode(self, npt="", node="", **kwargs) -> str: """Defines a keypoint at an existing node location. APDL Command: KNODE @@ -595,11 +581,8 @@ def knode(self, npt="", node="", **kwargs) -> int: 1 """ - msg = self.run(f"KNODE,{npt},{node}", **kwargs) - if msg: - res = re.search(r"KEYPOINT NUMBER =\s+(\d+)", msg) - if res is not None: - return int(res.group(1)) + cmd = f"KNODE,{npt},{node}" + return parse.parse_knode(self.run(cmd, **kwargs)) def kplot(self, np1="", np2="", ninc="", lab="", **kwargs): """Displays the selected keypoints. diff --git a/src/ansys/mapdl/core/_commands/preproc/material_data_tables.py b/src/ansys/mapdl/core/_commands/preproc/material_data_tables.py index a5769de079..a0c1c4dc33 100644 --- a/src/ansys/mapdl/core/_commands/preproc/material_data_tables.py +++ b/src/ansys/mapdl/core/_commands/preproc/material_data_tables.py @@ -420,12 +420,7 @@ def tbdele(self, lab="", mat1="", mat2="", inc="", tbopt="", **kwargs): This command is also valid in the solution processor (:meth:`mapdl.slashsolu() `), but is not intended for changing material behaviors between load steps. """ - command = "TBDELE,%s,%s,%s,%s" % ( - str(lab), - str(mat1), - str(mat2), - str(inc), - ) + command = f"TBDELE,{lab},{mat1},{mat2},{inc},{tbopt}" return self.run(command, **kwargs) def tbeo(self, par="", value="", **kwargs): diff --git a/src/ansys/mapdl/core/_commands/preproc/nodes.py b/src/ansys/mapdl/core/_commands/preproc/nodes.py index f0407732f3..48f2a43f78 100644 --- a/src/ansys/mapdl/core/_commands/preproc/nodes.py +++ b/src/ansys/mapdl/core/_commands/preproc/nodes.py @@ -20,8 +20,6 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -import re - from ansys.mapdl.core._commands import parse @@ -249,11 +247,7 @@ def n(self, node="", x="", y="", z="", thxy="", thyz="", thzx="", **kwargs) -> i """ command = f"N,{node},{x},{y},{z},{thxy},{thyz},{thzx}" - msg = self.run(command, **kwargs) - if msg: - res = re.search(r"(NODE\s*)([0-9]+)", msg) - if res is not None: - return int(res.group(2)) + return parse.parse_n(self.run(command, **kwargs)) def naxis(self, action="", val="", **kwargs): """Generates nodes for general axisymmetric element sections. diff --git a/src/ansys/mapdl/core/_commands/preproc/special_purpose.py b/src/ansys/mapdl/core/_commands/preproc/special_purpose.py index 4679846207..6f36494d18 100644 --- a/src/ansys/mapdl/core/_commands/preproc/special_purpose.py +++ b/src/ansys/mapdl/core/_commands/preproc/special_purpose.py @@ -205,7 +205,7 @@ def cint( command = f"CINT,{action},{par1},{par2},{par3},{par4},{par5},{par6},{par7}" return self.run(command, **kwargs) - def cycexpand(self, wn="", option="", value1="", value2=""): + def cycexpand(self, wn="", option="", value1="", value2="", **kwargs): """Graphically expands displacements, stresses and strains of a cyclically symmetric model. 
diff --git a/src/ansys/mapdl/core/_commands/session/run_controls.py b/src/ansys/mapdl/core/_commands/session/run_controls.py index 6981a401e9..29fea7ec2d 100644 --- a/src/ansys/mapdl/core/_commands/session/run_controls.py +++ b/src/ansys/mapdl/core/_commands/session/run_controls.py @@ -664,7 +664,7 @@ def sys(self, string="", **kwargs): This command is valid in any processor. """ - command = "/SYS,%s" % (str(string)) + command = f"/SYS,{string}" return self.run(command, **kwargs) def unpause(self, **kwargs): diff --git a/src/ansys/mapdl/core/_commands/solution/analysis_options.py b/src/ansys/mapdl/core/_commands/solution/analysis_options.py index eab0a0d6cc..e5b87da1e8 100644 --- a/src/ansys/mapdl/core/_commands/solution/analysis_options.py +++ b/src/ansys/mapdl/core/_commands/solution/analysis_options.py @@ -1167,7 +1167,7 @@ def ddoption(self, decomp="", nprocpersol="", numsolforlp="", **kwargs): domains by expanding the smaller domains from the reduced graph back to the original mesh. """ - command = f"DDOPTION,{decomp}" + command = f"DDOPTION,{decomp},{nprocpersol},{numsolforlp}" return self.run(command, **kwargs) def dmpext( diff --git a/src/ansys/mapdl/core/mapdl_extended.py b/src/ansys/mapdl/core/mapdl_extended.py index 0e5c859eb8..06a7dd619e 100644 --- a/src/ansys/mapdl/core/mapdl_extended.py +++ b/src/ansys/mapdl/core/mapdl_extended.py @@ -1382,7 +1382,7 @@ def cmplot(self, label: str = "", entity: str = "", keyword: str = "", **kwargs) return output @wraps(_MapdlCore.inquire) - def inquire(self, strarray="", func="", arg1="", arg2=""): + def inquire(self, strarray="", func="", arg1="", arg2="", **kwargs): """Wraps original INQUIRE function""" func_options = [ "LOGIN", @@ -1424,7 +1424,9 @@ def inquire(self, strarray="", func="", arg1="", arg2=""): f"The arguments (strarray='{strarray}', func='{func}') are not valid." ) - response = self.run(f"/INQUIRE,{strarray},{func},{arg1},{arg2}", mute=False) + response = self.run( + f"/INQUIRE,{strarray},{func},{arg1},{arg2}", mute=False, **kwargs + ) if func.upper() in [ "ENV", "TITLE", @@ -1463,7 +1465,7 @@ def lgwrite(self, fname="", ext="", kedit="", remove_grpc_extra=True, **kwargs): fname_ = self._get_file_name(fname=file_, ext=ext_) # generate the log and download if necessary - output = super().lgwrite(fname=fname_, kedit=kedit, **kwargs) + output = super().lgwrite(fname=fname_, ext="", kedit=kedit, **kwargs) # Let's download the file to the location self._download(fname_, fname) diff --git a/src/ansys/mapdl/core/mapdl_grpc.py b/src/ansys/mapdl/core/mapdl_grpc.py index ea0aac63d8..9ee6b7a84a 100644 --- a/src/ansys/mapdl/core/mapdl_grpc.py +++ b/src/ansys/mapdl/core/mapdl_grpc.py @@ -1360,7 +1360,7 @@ def list_files(self, refresh_cache: bool = True) -> List[str]: return files @supress_logging - def sys(self, cmd): + def sys(self, cmd, **kwargs): """Pass a command string to the operating system. APDL Command: /SYS @@ -1393,7 +1393,7 @@ def sys(self, cmd): """ # always redirect system output to a temporary file tmp_file = f"__tmp_sys_out_{random_string()}__" - super().sys(f"{cmd} > {tmp_file}") + super().sys(f"{cmd} > {tmp_file}", **kwargs) if self._local: # no need to download when local with open(os.path.join(self.directory, tmp_file)) as fobj: obj = fobj.read() diff --git a/tests/test_commands.py b/tests/test_commands.py index 2aa4ec8b19..51573fb4a0 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -21,6 +21,7 @@ # SOFTWARE. 
import inspect +from unittest.mock import patch import numpy as np import pytest @@ -1045,3 +1046,87 @@ def test_flist(self, mapdl): assert not flist_result.empty assert flist_result.compare(df_f).empty + + +class Test_MAPDL_commands: + SKIP = [ + "aplot", + "cfopen", + "cmatrix", + "create", + "end", + "eplot", + "geometry", + "input", + "kplot", + "lgwrite", + "lplot", + "lsread", + "mwrite", + "nplot", + "sys", + "vplot", + "vwrite", + ] + + @staticmethod + def fake_wrap(*args, **kwags): + return args[0] + + MAPDL_cmds = [each for each in dir(Commands) if not each.startswith("_")] + + @pytest.mark.parametrize("cmd", MAPDL_cmds) + @patch("ansys.mapdl.core.mapdl_grpc.MapdlGrpc._send_command", fake_wrap) + # Skip post processing the plot in PLESOL commands like. + @patch("ansys.mapdl.core.mapdl_core.PLOT_COMMANDS", []) + # skip retrieving value + @patch("ansys.mapdl.core.mapdl_grpc.MapdlGrpc.scalar_param", fake_wrap) + # Skip output the entity id after geometry manipulation + @patch("ansys.mapdl.core._commands.parse.parse_a", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_e", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_et", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_k", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_knode", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_kdist", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_kl", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_kpoint", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_line_no", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_line_nos", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_n", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_ndist", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_output_areas", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_output_volume_area", fake_wrap) + @patch("ansys.mapdl.core._commands.parse.parse_v", fake_wrap) + def test_command(self, mapdl, cmd): + func = getattr(mapdl, cmd) + + # Avoid wraps + while hasattr(func, "__wrapped__"): + func = func.__wrapped__ + + if cmd in self.SKIP: + pytest.skip("This function is overwritten in a subclass.") + + parm = inspect.signature(func).parameters + assert "kwargs" in parm, "'kwargs' argument is missing in function signature." + + args = [f"arg{i}" for i in range(len(parm) - 1)] # 3 = self, cmd, kwargs + + if list(parm)[0].lower() == "self": + args = args[:-1] + post = func(mapdl, *args) + else: + post = func(*args) + + for arg in args: + assert arg in post + + # assert ",".join(args) in post.replace(",,", ",").replace(" ", "") + cmd_ = cmd.upper() + if cmd_.startswith("SLASH"): + cmd_ = cmd_.replace("SLASH_", "/").replace("SLASH", "/") + + if cmd_.startswith("STAR"): + cmd_ = cmd_.replace("STAR", "*") + + assert cmd_ in post.upper()
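
As a small illustrative sketch (not part of the patch itself), the parse helpers added to src/ansys/mapdl/core/_commands/parse.py in the last patch can be exercised directly; the abbreviated MAPDL-style output strings below are assumptions chosen only to match the regular expressions introduced above.

# Hypothetical usage sketch of the new parse helpers; the sample strings are
# assumed, shortened forms of typical MAPDL command echoes.
from ansys.mapdl.core._commands import parse

assert parse.parse_k("KEYPOINT NUMBER =     10") == 10                # K output
assert parse.parse_n("NODE     25   X,Y,Z=  0.0  0.0  0.0") == 25     # N output
assert parse.parse_kl("KEYPOINT     7   ON LINE     2") == 7          # KL output
assert parse.parse_knode("KEYPOINT NUMBER =     3") == 3              # KNODE output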