diff --git a/.conda/meta.yaml b/.conda/meta.yaml index 7c594a77..9cac60b7 100644 --- a/.conda/meta.yaml +++ b/.conda/meta.yaml @@ -16,7 +16,7 @@ build: script: "{{ PYTHON }} -m pip install . --no-deps -vv && cd .. && {{ PYTHON }} -m sparc.download_data" entry_points: - "sparc-ase = sparc.cli:main" - + requirements: host: diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 221bd841..b05fb46c 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -16,7 +16,7 @@ Provide a minimal list of settings / codes to help us debug, such as - Version (git commit hash) of `SPARC-X-API` - `SPARC` C-code version (see the SPARC .out file header) - Your platform and architecture - + **Expected behavior** What is the code intended to achieve? diff --git a/.github/workflows/installation_test.yml b/.github/workflows/installation_test.yml index 81ee3605..b1c9a793 100644 --- a/.github/workflows/installation_test.yml +++ b/.github/workflows/installation_test.yml @@ -39,8 +39,12 @@ jobs: python -m sparc.download_data - name: Download SPARC output files to SPARC-master run: | - wget https://github.com/SPARC-X/SPARC/archive/refs/heads/master.zip - unzip master.zip + # Pin the current version of SPARC to versions before MLFF + # wget https://github.com/SPARC-X/SPARC/archive/refs/heads/master.zip + # unzip master.zip + wget -O SPARC-master.zip https://codeload.github.com/SPARC-X/SPARC/zip/3371b4401e4ebca0921fb77a02587f578f3bf3f7 + unzip SPARC-master.zip + mv SPARC-33* SPARC-master - name: Test with pytest run: | # python -m pytest -svv tests/ --cov=sparc --cov-report=json --cov-report=html @@ -53,7 +57,7 @@ jobs: COVERAGE=`cat coverage.json | jq .totals.percent_covered | xargs printf '%.*f' 0` echo "Current coverage is $COVERAGE" echo "COVPERCENT=$COVERAGE" >> $GITHUB_ENV - + - name: Lint with flake8 run: | echo $CONDA_PREFIX @@ -88,7 +92,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} RID: ${{ 
github.run_id }} - + test-socket: defaults: run: @@ -139,7 +143,67 @@ jobs: COVERAGE=`cat coverage.json | jq .totals.percent_covered | xargs printf '%.*f' 0` echo "Current coverage is $COVERAGE" echo "COVPERCENT=$COVERAGE" >> $GITHUB_ENV - + + - name: Lint with flake8 + run: | + echo $CONDA_PREFIX + conda info + flake8 sparc/ --count --select=E9,F63,F7,F82 --show-source --statistics + flake8 sparc/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + + # To be deleted once 1.0 release is done + build-linux-ase-3-22: + defaults: + run: + shell: bash -l {0} + runs-on: ubuntu-latest + strategy: + max-parallel: 5 + + steps: + - uses: actions/checkout@v3 + - uses: conda-incubator/setup-miniconda@v2 + with: + python-version: "3.10" + mamba-version: "*" + channels: conda-forge,alchem0x2a,defaults + channel-priority: true + activate-environment: sparc-api-test + - name: Install dependencies + run: | + # mamba install -c conda-forge ase>=3.22 pymatgen flake8 pytest + mamba install -c conda-forge make compilers openmpi fftw scalapack openblas + - name: Install package + run: | + pip install -e ".[test]" ase==3.22 numpy==1.24 scipy==1.10 + # Manually downgrade + # Download the external psp data + python -m sparc.download_data + - name: Download SPARC output files to SPARC-master + run: | + # TODO: merge to master + wget -O SPARC-socket.zip https://codeload.github.com/alchem0x2A/SPARC/zip/refs/heads/socket + unzip SPARC-socket.zip + - name: Compile SPARC with socket + run: | + cd SPARC-socket/src + make clean + make -j2 USE_SOCKET=1 USE_MKL=0 USE_SCALAPACK=1 DEBUG_MODE=1 + ls ../lib + - name: Test with pytest + run: | + ls ./SPARC-socket/lib/sparc + PWD=$(pwd) + export SPARC_TESTS_DIR="${PWD}/SPARC-socket/tests" + export ASE_SPARC_COMMAND="mpirun -n 1 ${PWD}/SPARC-socket/lib/sparc" + export SPARC_DOC_PATH="${PWD}/SPARC-socket/doc/.LaTeX" + coverage run -a -m pytest -svv tests/ + coverage json --omit="tests/*.py" + coverage html --omit="tests/*.py" + 
COVERAGE=`cat coverage.json | jq .totals.percent_covered | xargs printf '%.*f' 0` + echo "Current coverage is $COVERAGE" + echo "COVPERCENT=$COVERAGE" >> $GITHUB_ENV + - name: Lint with flake8 run: | echo $CONDA_PREFIX diff --git a/.github/workflows/update_api.yml b/.github/workflows/update_api.yml index f65f430e..6bd80435 100644 --- a/.github/workflows/update_api.yml +++ b/.github/workflows/update_api.yml @@ -59,4 +59,3 @@ jobs: gh pr create --base master --title "[PR Bot] New JSON API version ${API_VERSION}" --body "Merge new JSON API version ${API_VERSION} into master" -R ${{ github.repository_owner }}/SPARC-X-API env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..e12a22b5 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,23 @@ +# Pre-commit hooks for SPARC-X-API +# Use pre-commit rn +exclude: "^tests/outputs/|^tests/psps/|^tests/sparc-latex-.*/|^tests/archive/|^sparc/sparc_json_api/" +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.3.0 + hooks: + - id: check-yaml + exclude: ".conda/meta.yaml" + - id: end-of-file-fixer + - id: trailing-whitespace + +- repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + name: isort (python) + args: ["--profile", "black"] + +- repo: https://github.com/psf/black + rev: 22.10.0 + hooks: + - id: black diff --git a/README.md b/README.md index cf8e4767..bd0d1361 100644 --- a/README.md +++ b/README.md @@ -322,7 +322,7 @@ The reasoning and details about unit conversion can be found in the [Rules for I In order for `SPARC-X-API` to be compatible with other ASE-based DFT calculators, -there is a list of special parameters consistent with the ASE convention and uses Å / eV / GPa / fs +there is a list of special parameters consistent with the ASE convention and uses Å / eV / GPa / fs unit system: | parameter name | meaning | example | equivalent `SPARC` input | @@ -394,7 +394,7 @@ bottleneck. 
The underlying software architecture is shown in [Fig. 3](#fig-3-spa ![scheme-sparc-socket](doc/img/scheme_socket_hetero.png) - + **Requirements**: the SPARC binary must be manually compiled from the source code with [socket @@ -425,10 +425,10 @@ Based on the scenarios, the socket communication layer can be accessed via the f 1. **SPARC binary only** ([Fig. 5](#fig-5-different-ways-of-using-sparcs-socket-mode) **a**) - + SPARC binary with socket support can be readily coupled with any i-PI compatible socker server, such as `ase.calculators.socketio.SocketIOCalculator`, for example - + ```python from ase.calculators.socketio import SocketIOCalculator from subprocess import Popen @@ -439,17 +439,17 @@ Based on the scenarios, the socket communication layer can be accessed via the f # Single point calculations process.kill() ``` - + The end user is responsible for generating the input files and making sure the same atoms structures are used by `SocketIOCalculator` and the SPARC binary. The mode is also limited - to be run on a single computer system. + to be run on a single computer system. 2. **Local-only Mode** ([Fig. 5](#fig-5-different-ways-of-using-sparcs-socket-mode) **b**) - + Ideal for standalone calculations, this mode simulates a conventional calculator while benefiting from socket-based efficiency. - + ```python with SPARC(use_socket=True, **normal_parameters) as calc: # Execute single-point calculations @@ -464,7 +464,7 @@ Based on the scenarios, the socket communication layer can be accessed via the f message to a local SPARC binary and send results back through the socket pipe. The server side can either be a normal i-PI compatible server (such as `SocketIOCalculator`) or server-mode `sparc.SPARC` (see 4). 
- + Start the client by: ```python client = SPARC(use_socket=True, @@ -472,7 +472,7 @@ Based on the scenarios, the socket communication layer can be accessed via the f with client: client.run() ``` - + Or via Command-Line: ```bash python -m sparc.client -s host:port @@ -483,14 +483,14 @@ Based on the scenarios, the socket communication layer can be accessed via the f new atoms positions and parameters arrive, the client will automatically determine if it is necessary to restart the SPARC subprocess. - + 4. **Server Mode** ([Fig. 5](#fig-5-different-ways-of-using-sparcs-socket-mode) **d**) Paired with the client mode in (3), SPARC-X-API can be run as a socket server, isolated from the node that performs the computation. This can be useful for highly-distributed computational workflows. - + On the server node, run: ```python server_calc = SPARC(use_socket=True, socket_params=dict(port=31415, server_only=True), **normal_parameters) @@ -498,18 +498,18 @@ Based on the scenarios, the socket communication layer can be accessed via the f # Execute single point calculations for atoms_1 # Execute single point calculations for atoms_2 ``` - + In this case, the server will opens `0.0.0.0:31415` for connection. Make sure your server is directly accessible from the clients and the port is not occupied. The socker server is capable of receiving `raw_results` directly from the clients, making it possible to access `server_calc.raw_results` without access to the file systems on the client side. - + ### (In-progress) Controlling SPARC routines from socket interface -As shown in [Fig. 4](#fig-4-overview-of-the-sparc-protocol-as-an-extension-to-the-standard-i-pi-protocol), +As shown in [Fig. 4](#fig-4-overview-of-the-sparc-protocol-as-an-extension-to-the-standard-i-pi-protocol), the SPARC socket protocol designs allows bidirectional control of internal SPARC routines. 
Local- or server-mode `sparc.SPARC` calculators can communicate with the SPARC binary via functions like diff --git a/doc/advanced_topics.md b/doc/advanced_topics.md index 469235fb..74b71ced 100644 --- a/doc/advanced_topics.md +++ b/doc/advanced_topics.md @@ -4,11 +4,11 @@ The design of `SPARC-X-API` is schematically shown in the following figure ### Behind the bundle file format -Instead of parsing individual `.ion` and `.inpt` files, -the bundle format (recognized by ASE by `format="sparc"`) will +Instead of parsing individual `.ion` and `.inpt` files, +the bundle format (recognized by ASE by `format="sparc"`) will gather information from all files and check if atomic information -and calculation results can be retrieved. -The central piece for handling the bundle format is +and calculation results can be retrieved. +The central piece for handling the bundle format is `sparc.io.SpardBundle` class. You can use it to parse an existing bundle ```python @@ -28,15 +28,15 @@ bundle = SparcBundle("path/to/your-calc.sparc", mode="w") bundle._write_ion_and_inpt(atoms, label="SPARC") ``` -For each individual SPARC file (e.g. `.ion`, `.inpt`, `.static`, `.geopt`, `.aimd`), -file-specific parsers are in `sparc.sparc_parsers.` files. +For each individual SPARC file (e.g. `.ion`, `.inpt`, `.static`, `.geopt`, `.aimd`), +file-specific parsers are in `sparc.sparc_parsers.` files. Each `_read_` method will return the structured raw-data dictionary of the files. Similarly, `_write_` takes the structured dictionary as input and write the file using only relevant data. ### Behind the JSON API -The JSON API are directly parsed from the `SPARC` documentation [LaTeX files](https://github.com/SPARC-X/SPARC/tree/master/doc/.LaTeX). +The JSON API are directly parsed from the `SPARC` documentation [LaTeX files](https://github.com/SPARC-X/SPARC/tree/master/doc/.LaTeX). 
The JSON API file (`sparc/sparc_json_api/parameters.json`) distributed by `SPARC-X-API` is generated by: ```bash @@ -77,9 +77,9 @@ sis = SparcAPI() ### Retriving parameters from old SPARC calculations -`sparc.SPARC` calculator supports the `restart` mode which will reconstruct all -parameters, psp files and atomic information from an existing SPARC calculation and -rerun them. +`sparc.SPARC` calculator supports the `restart` mode which will reconstruct all +parameters, psp files and atomic information from an existing SPARC calculation and +rerun them. ```python from sparc import SPARC @@ -96,14 +96,14 @@ old_atoms.get_potential_energy() ### Rules for input parameters in `sparc.SPARC` calculator -When constructing the `sparc.SPARC` calculator using the syntax +When constructing the `sparc.SPARC` calculator using the syntax ```python calc = SPARC(directory="", **kwargs) ``` the parameters are handled in the following priority: 1) Parameters available to `.inpt` files (i.e. **CAPITALIZED**) have highest priority and overwrite all special inputs. They should be set directly using the atomic unit values (i.e the same value as they appear in the `.inpt` files). -2) Special inputs (i.e, `h`, `kpts`, `gpts`, `xc`, `convergence`) have second highest priority and overwrite default values. They are using the ASE unit system (i.e. Å, eV, GPa, fs). -3) If none of the parameters are provided, `SPARC` uses its default parameter set, currently +2) Special inputs (i.e, `h`, `kpts`, `gpts`, `xc`, `convergence`) have second highest priority and overwrite default values. They are using the ASE unit system (i.e. Å, eV, GPa, fs). +3) If none of the parameters are provided, `SPARC` uses its default parameter set, currently ```python {"xc": "pbe", "h": 0.25, "kpts": (1, 1, 1)} ``` @@ -114,11 +114,11 @@ Additionally, boolean inputs (i.e. 
`PRINT_FORCES`) can be written in both intege ### Multiple occurance of output files -In a typical SPARC calculation, there may be multiple result files in the SPARC bundle, with different suffixes (e.g. `.out`, `.out_01`, `.out_02` etc.). +In a typical SPARC calculation, there may be multiple result files in the SPARC bundle, with different suffixes (e.g. `.out`, `.out_01`, `.out_02` etc.). These files can be a result of restarted geometry optimization / AIMD or written by an ASE optimizer. When using `read_sparc` to access the files, you can add `include_all_files=True` option to parse -trajectories from all files. +trajectories from all files. ```python from sparc.io import read_sparc diff --git a/doc/changes_v0.1.md b/doc/changes_v0.1.md index d6ccaa41..b7d30d52 100644 --- a/doc/changes_v0.1.md +++ b/doc/changes_v0.1.md @@ -14,14 +14,14 @@ Nevertheless, reading calculation results generated by a v0.1 API code will not ``` which maps atoms 3, 2, 1, 0 from the SPARC .ion file order to atoms 0, 1, 2, 3 in ASE order. This is useful for systems that are constructed by ASE's `add_adsorbate` method. -3. v1.0 API accepts all SPARC internal parameters (i.e. **CAPITALIZED**) in *atomic units* for consistency reason. +3. v1.0 API accepts all SPARC internal parameters (i.e. **CAPITALIZED**) in *atomic units* for consistency reason. However, we also keep a list of "special input params" that are conventionally used in other ASE calculators, that use Å / eV / GPa / fs unit system. 4. Defining `LATVEC`, `LATVEC_SCALE`, or `CELL` via the calculator parameters is no longer encouraged. Instead, all structure changes should be made to the `Atoms` object. For more discussion please see [Advanced Topic] section. -Below are a list of v0.1 method of the `SPARC` calculator and their current status in v0.2 API. +Below are a list of v0.1 method of the `SPARC` calculator and their current status in v0.2 API. `calc` is an instance of `sparc.SPARC`. 
| old methods | status in v1.0 API | alternatives | diff --git a/examples/active_learning/example_finetuna_minimal.py b/examples/active_learning/example_finetuna_minimal.py index ab74a70f..58cb50a9 100644 --- a/examples/active_learning/example_finetuna_minimal.py +++ b/examples/active_learning/example_finetuna_minimal.py @@ -5,28 +5,29 @@ python example_finetuna_minimal.py """ -import torch +import argparse import os -import yaml from pathlib import Path + +import ase +import torch +import yaml +from ase.build import molecule +from ase.cluster.cubic import FaceCenteredCubic +from ase.constraints import FixAtoms from ase.io.trajectory import Trajectory from ase.optimize import BFGS -from ase.constraints import FixAtoms from finetuna.ml_potentials.finetuner_ensemble_calc import FinetunerEnsembleCalc -import ase -from ase.cluster.cubic import FaceCenteredCubic from finetuna.online_learner.online_learner import OnlineLearner -import argparse -from sparc.calculator import SPARC -from ase.build import molecule +from sparc.calculator import SPARC cpu = not torch.cuda.is_available() curdir = Path(__file__).parent config_file = curdir / "ft_config_gemnet_gpu.yml" with open(config_file, "r") as fd: configs = yaml.load(fd, Loader=yaml.FullLoader) - + checkpoint = os.environ.get("CHECKPOINT_PATH", None) if checkpoint is None: # Use default (relative path) @@ -46,7 +47,6 @@ ) -<<<<<<< HEAD # init_molecule = molecule("H2O", pbc=False, cell=[8, 8, 8]) # init_molecule.center() # init_molecule.rattle() @@ -61,7 +61,7 @@ # init_atoms.center() # init_atoms.rattle(0.05) -init_atoms = FaceCenteredCubic('Cu', surfaces, layers, latticeconstant=lc) +init_atoms = FaceCenteredCubic("Cu", surfaces, layers, latticeconstant=lc) init_atoms.cell = [12, 12, 12] init_atoms.center() init_atoms.pbc = False @@ -74,20 +74,11 @@ # atoms.calc = calc # dyn = BFGS(atoms, maxstep=0.2, trajectory="pure_bfgs.traj") # dyn.run(fmax=0.03) - + with SPARC(directory="online_coldstart", **sparc_params) as 
parent_calc: atoms = init_atoms.copy() -======= -init_molecule = molecule("H2O", pbc=False, cell=[8, 8, 8]) -init_molecule.center() -init_molecule.rattle() - -sparc_params = {"directory": curdir, "xc": "pbe", "h": 0.22} -with SPARC(**sparc_params) as parent_calc: ->>>>>>> f703ec5 (use smaller mesh size) onlinecalc = OnlineLearner(learner, [], ml_potential, parent_calc) atoms.calc = onlinecalc - dyn = BFGS(atoms, - maxstep=0.2) + dyn = BFGS(atoms, maxstep=0.2) dyn.run(fmax=0.03) diff --git a/setup.py b/setup.py index 754309a2..77ad5d29 100644 --- a/setup.py +++ b/setup.py @@ -11,24 +11,35 @@ "pyfakefs", "pytest-cov", "black", + "isort", "flake8", "anybadge", + "pre-commit", ] setup( name="sparc-x-api", - version="1.0.3", + version="1.0.4", python_requires=">=3.8", description="Python API for the SPARC DFT Code", author="Tian Tian, Ben Comer", author_email="alchem0x2a@gmail.com, ben.comer@gatech.edu", url="https://github.com/SPARC-X/SPARC-X-API", packages=find_packages(), - install_requires=["ase==3.22.0", "numpy>=1.23", "packaging>=20.0", "psutil>=5.0.0"], + # ASE 3.22 dependency will be deprecated in 1.1.0+ release + install_requires=["ase>=3.22.0", "numpy>=1.23", "packaging>=20.0", "psutil>=5.0.0"], entry_points={ "ase.io": [ "sparc = sparc.io", ], + # The ioformats are only compatible with ase>=3.23 + "ase.ioformats": [ + "sparc = sparc.io:format_sparc", + "sparc_ion = sparc.io:format_ion", + "sparc_static = sparc.io:format_static", + "sparc_geopt = sparc.io:format_geopt", + "sparc_aimd = sparc.io:format_aimd", + ], "console_scripts": ["sparc-ase=sparc.cli:main"], }, extras_require={ diff --git a/sparc/__init__.py b/sparc/__init__.py index 3d9f2bbf..cbcf634f 100644 --- a/sparc/__init__.py +++ b/sparc/__init__.py @@ -31,10 +31,20 @@ def __getattr__(self, name): _import_complete = False if _import_complete: + from packaging import version + from .calculator import SPARC from .io import read_sparc, register_ase_io_sparc, write_sparc - register_ase_io_sparc() + # If 
ase version less than 3.23, use manual register function + # Otherwise use the new entry point + if version.parse(ase.__version__) < version.parse("3.23"): + register_ase_io_sparc() + else: + # register calculator class + from ase.calculators.calculator import register_calculator_class + + register_calculator_class("sparc", SPARC) else: # If importing is not complete, any code trying to directly import # the following attributes will raise ImportError diff --git a/sparc/calculator.py b/sparc/calculator.py index 2b6bfec1..38b57697 100644 --- a/sparc/calculator.py +++ b/sparc/calculator.py @@ -10,9 +10,10 @@ import psutil from ase.atoms import Atoms from ase.calculators.calculator import Calculator, FileIOCalculator, all_changes +from ase.parallel import world +from ase.stress import full_3x3_to_voigt_6_stress from ase.units import Bohr, GPa, Hartree, eV from ase.utils import IOContext -from ase.stress import full_3x3_to_voigt_6_stress from .api import SparcAPI from .io import SparcBundle @@ -76,6 +77,9 @@ class SPARC(FileIOCalculator, IOContext): "kpts": (1, 1, 1), "h": 0.25, # Angstrom equivalent to MESH_SPACING = 0.47 } + # TODO: ASE 3.23 compatibility. 
should use profile + # TODO: remove the legacy command check for future releases + _legacy_default_command = "sparc not initialized" def __init__( self, @@ -248,7 +252,11 @@ def ensure_socket(self): # TODO: Exception for wrong port self.in_socket = SPARCSocketServer( port=self.socket_params["port"], - log=self.openfile(self._indir(ext=".log", label="socket"), mode="w"), + log=self.openfile( + file=self._indir(ext=".log", label="socket"), + comm=world, + mode="w", + ), parent=self, ) else: @@ -257,7 +265,11 @@ def ensure_socket(self): self.in_socket = SPARCSocketServer( unixsocket=socket_name, # TODO: make the log fd persistent - log=self.openfile(self._indir(ext=".log", label="socket"), mode="w"), + log=self.openfile( + file=self._indir(ext=".log", label="socket"), + comm=world, + mode="w", + ), parent=self, ) # TODO: add the outbound socket client @@ -269,7 +281,7 @@ def ensure_socket(self): host=self.socket_params["host"], port=self.socket_params["port"], # TODO: change later - log=self.openfile("out_socket.log"), + log=self.openfile(file="out_socket.log", comm=world), # TODO: add the log and timeout part parent_calc=self, ) @@ -294,7 +306,7 @@ def use_socket(self): @property def socket_mode(self): """The mode of the socket calculator: - + disabled: pure SPARC file IO interface local: Serves as a local SPARC calculator with socket support client: Relay SPARC calculation @@ -429,12 +441,16 @@ def _make_command(self, extras=""): Extras will add additional arguments to the self.command, e.g. -name, -socket etc + + 2024.09.05 @alchem0x2a + Note in ase>=3.23 the FileIOCalculator.command will fallback + to self._legacy_default_command, which we should set to invalid value for now. 
""" if isinstance(extras, (list, tuple)): extras = " ".join(extras) else: extras = extras.strip() - if self.command is None: + if (self.command is None) or (self.command == SPARC._legacy_default_command): command_env = os.environ.get("ASE_SPARC_COMMAND", None) if command_env is None: sparc_exe, mpi_exe, num_cores = _find_default_sparc() @@ -518,7 +534,9 @@ def calculate(self, atoms=None, properties=["energy"], system_changes=all_change return if self.socket_mode == "server": - self._calculate_as_server(atoms=atoms, properties=properties, system_changes=system_changes) + self._calculate_as_server( + atoms=atoms, properties=properties, system_changes=system_changes + ) return self.write_input(self.atoms, properties, system_changes) self.execute() @@ -538,24 +556,26 @@ def calculate(self, atoms=None, properties=["energy"], system_changes=all_change self.atoms.get_initial_magnetic_moments() ) - def _calculate_as_server(self, atoms=None, properties=["energy"], - system_changes=all_changes): - """Use the server component to send instructions to socket - """ + def _calculate_as_server( + self, atoms=None, properties=["energy"], system_changes=all_changes + ): + """Use the server component to send instructions to socket""" ret, raw_results = self.in_socket.calculate_new_protocol( - atoms=atoms, params=self.parameters + atoms=atoms, params=self.parameters ) self.raw_results = raw_results if "stress" not in self.results: virial_from_socket = ret.get("virial", np.zeros(6)) - stress_from_socket = -full_3x3_to_voigt_6_stress(virial_from_socket) / atoms.get_volume() + stress_from_socket = ( + -full_3x3_to_voigt_6_stress(virial_from_socket) / atoms.get_volume() + ) self.results["stress"] = stress_from_socket # Energy and forces returned in this case do not need # resorting, since they are already in the same format self.results["energy"] = ret["energy"] self.results["forces"] = ret["forces"] return - + def _calculate_with_socket( self, atoms=None, properties=["energy"], 
system_changes=all_changes ): @@ -597,7 +617,7 @@ def _calculate_with_socket( ) # Use the IOContext class's lazy context manager # TODO what if self.log is None - fd_log = self.openfile(self.log) + fd_log = self.openfile(file=self.log, comm=world) self.process = subprocess.Popen( cmds, shell=True, @@ -621,7 +641,9 @@ def _calculate_with_socket( # For stress information, we make sure that the stress is always present if "stress" not in self.results: virial_from_socket = ret.get("virial", np.zeros(6)) - stress_from_socket = -full_3x3_to_voigt_6_stress(virial_from_socket) / atoms.get_volume() + stress_from_socket = ( + -full_3x3_to_voigt_6_stress(virial_from_socket) / atoms.get_volume() + ) self.results["stress"] = stress_from_socket self.system_state = self._dump_system_state() return @@ -926,12 +948,12 @@ def detect_sparc_version(self): ) return version - def run_client(self, atoms=None, use_stress=False): - """Main method to start the client code - """ + """Main method to start the client code""" if not self.socket_mode == "client": - raise RuntimeError("Cannot use SPARC.run_client if the calculator is not configured in client mode!") + raise RuntimeError( + "Cannot use SPARC.run_client if the calculator is not configured in client mode!" + ) self.out_socket.run(atoms, use_stress) diff --git a/sparc/cli.py b/sparc/cli.py index 3dfd3560..cb898fb9 100644 --- a/sparc/cli.py +++ b/sparc/cli.py @@ -1,3 +1,10 @@ +# Temporary fix to recognize .sparc from commandline +from .io import __register_new_filetype + +__register_new_filetype() + + +# The cli part should be considered deprecated now. 
def main( prog="sparc-ase", description="ASE command line tool with SPARC support", @@ -7,19 +14,8 @@ def main( import sys from ase.cli.main import main as ase_main - from ase.io import sparc as mod_sparc - from ase.io.sparc import read_sparc as _old_read_sparc - - import sparc - - def _new_read_sparc(filename, index, **kwargs): - return _old_read_sparc(filename, index, include_all_files=True, **kwargs) - try: - sys.modules["ase.io.sparc"].read_sparc = _new_read_sparc - ase_main(prog=prog, description=description, hook=hook, args=args) - finally: - sys.modules["ase.io.sparc"].read_sparc = _old_read_sparc + ase_main(prog=prog, description=description, hook=hook, args=args) if __name__ == "__main__": diff --git a/sparc/client.py b/sparc/client.py index 35010862..2f420cc9 100644 --- a/sparc/client.py +++ b/sparc/client.py @@ -1,10 +1,12 @@ import argparse -import sparc from pathlib import Path from ase.io import read -from .io import read_sparc + +import sparc + from .calculator import SPARC +from .io import read_sparc def split_socket_name(name): @@ -25,9 +27,10 @@ def split_socket_name(name): port = int(port) unixsocket = None host = host if len(host) > 0 else "localhost" - + return host, port, unixsocket + def main(): """Running SPARC-X-API as a socket client from command line @@ -42,7 +45,7 @@ def main(): python -m sparc.client -s host:port --workdir . If the workdir is a SPARC calculation bundle, the initial atoms and parameters will be reloaded. - + 2. Start the socket client with initial atoms read from file python -m sparc.client -s host:port --atoms-from-file input.xyz @@ -55,24 +58,35 @@ def main(): usage=main.__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument("-s", "--socket", - help=("Address of the socket server " - "in the format of host:port. 
If host is not defined, localhost will be used.")) - parser.add_argument("-w", "--workdir", - help=("Workdir for performing the SPARC calculations"), - default=".") - parser.add_argument("--atoms-from-file", - help=("File or directory to read the input atoms information"), - default=None) - parser.add_argument("--atoms-format", - help="File format to read from external file.", - default=None, - ) + parser.add_argument( + "-s", + "--socket", + help=( + "Address of the socket server " + "in the format of host:port. If host is not defined, localhost will be used." + ), + ) + parser.add_argument( + "-w", + "--workdir", + help=("Workdir for performing the SPARC calculations"), + default=".", + ) + parser.add_argument( + "--atoms-from-file", + help=("File or directory to read the input atoms information"), + default=None, + ) + parser.add_argument( + "--atoms-format", + help="File format to read from external file.", + default=None, + ) args = parser.parse_args() host, port, unixsocket = split_socket_name(args.socket) workdir = Path(args.workdir) - #TODO: implement unixsocket - #TODO: reuse init params + # TODO: implement unixsocket + # TODO: reuse init params try: init_atoms = read_sparc(workdir) except: @@ -81,18 +95,16 @@ def main(): atoms_file = Path(args.atoms_from_file) atoms_format = args.atoms_format init_atoms = read(atoms_file, format=atoms_format) - - client_calc = SPARC(directory=workdir, - use_socket=True, - socket_params=dict( - host=host, - port=port, - server_only=False - )) + + client_calc = SPARC( + directory=workdir, + use_socket=True, + socket_params=dict(host=host, port=port, server_only=False), + ) # We should always enable use_stress, since the # SPARC implementation ensures it will also be present client_calc.run_client(atoms=init_atoms, use_stress=True) - + if __name__ == "__main__": main() diff --git a/sparc/docparser.py b/sparc/docparser.py index 129d4e30..0887d2b4 100644 --- a/sparc/docparser.py +++ b/sparc/docparser.py @@ -170,8 +170,18 @@ 
def parse_version(self, parse=True): ) self.version = None return - date_str = match[0].strip().replace(",", " ") - date_version = datetime.strptime(date_str, "%b %d %Y").strftime("%Y.%m.%d") + # We need to add more spacing matching in case the source code includes extra + date_str = re.sub(r"\s+", " ", match[0].strip().replace(",", " ")) + # Older version of SPARC doc may contain abbreviated month format + date_version = None + for fmt in ("%b %d %Y", "%B %d %Y"): + try: + date_version = datetime.strptime(date_str, fmt).strftime("%Y.%m.%d") + break + except Exception: + continue + if date_version is None: + raise ValueError(f"Cannot parse date time {date_str}") self.version = date_version return diff --git a/sparc/io.py b/sparc/io.py index 0a396b96..8fccc625 100644 --- a/sparc/io.py +++ b/sparc/io.py @@ -1,13 +1,5 @@ """Providing a new bundled SPARC file format -.sparc - -Many of the logics are taken from -ase.io.vasp - -ase.io.trajectory - """ -import itertools import os import re from pathlib import Path @@ -360,11 +352,15 @@ def read_raw_results(self, include_all_files=False): self.last_image = int(suffix.split("_")[1]) self.num_calculations = self.last_image + 1 + # Always make sure ion / inpt results are parsed regardless of actual calculations if include_all_files: - results = [ - self._read_results_from_index(index) - for index in range(self.num_calculations) - ] + if self.num_calculations > 0: + results = [ + self._read_results_from_index(index) + for index in range(self.num_calculations) + ] + else: + results = [self._read_results_from_index(self.last_image)] else: results = self._read_results_from_index(self.last_image) @@ -782,7 +778,7 @@ def read_psp_info(self): return psp_info -def read_sparc(filename, index=-1, include_all_files=False, **kwargs): +def read_sparc(filename, index=-1, include_all_files=True, **kwargs): """Parse a SPARC bundle, return an Atoms object or list of Atoms (image) with embedded calculator result. 
@@ -829,7 +825,7 @@ def write_sparc(filename, images, **kwargs):
 @deprecated(
     "Reading individual .ion file is not recommended. Please use read_sparc instead."
 )
-def read_ion(filename, **kwargs):
+def read_sparc_ion(filename, **kwargs):
     """Parse an .ion file inside the SPARC bundle using a wrapper around SparcBundle
 
     The reader works only when other files (.inpt) exist.
@@ -850,10 +846,14 @@ def read_ion(filename, **kwargs):
     return atoms
 
 
+# Backward compatibility
+read_ion = read_sparc_ion
+
+
 @deprecated(
     "Writing individual .ion file is not recommended. Please use write_sparc instead."
 )
-def write_ion(filename, atoms, **kwargs):
+def write_sparc_ion(filename, atoms, **kwargs):
     """Write .ion file using the SparcBundle wrapper. This method will also create the .inpt file
 
     This is only for backward compatibility
@@ -871,10 +871,14 @@ def write_ion(filename, atoms, **kwargs):
     return atoms
 
 
+# Backward compatibility
+write_ion = write_sparc_ion
+
+
 @deprecated(
     "Reading individual .static file is not recommended. Please use read_sparc instead."
 )
-def read_static(filename, index=-1, **kwargs):
+def read_sparc_static(filename, index=-1, **kwargs):
     """Parse a .static file bundle using a wrapper around SparcBundle
 
     The reader works only when other files (.ion, .inpt) exist.
@@ -889,14 +893,22 @@ def read_static(filename, index=-1, **kwargs):
     parent_dir = Path(filename).parent
     api = locate_api()
     sb = SparcBundle(directory=parent_dir, validator=api)
+    # In most of the cases the user wants to inspect all images
+    kwargs = kwargs.copy()
+    if "include_all_files" not in kwargs:
+        kwargs.update(include_all_files=True)
    atoms_or_images = sb.convert_to_ase(index=index, **kwargs)
     return atoms_or_images
 
 
+# Backward compatibility
+read_static = read_sparc_static
+
+
 @deprecated(
    "Reading individual .geopt file is not recommended. Please use read_sparc instead."
 
) -def read_geopt(filename, index=-1, **kwargs): +def read_sparc_geopt(filename, index=-1, **kwargs): """Parse a .geopt file bundle using a wrapper around SparcBundle The reader works only when other files (.ion, .inpt) exist. @@ -911,14 +923,21 @@ def read_geopt(filename, index=-1, **kwargs): parent_dir = Path(filename).parent api = locate_api() sb = SparcBundle(directory=parent_dir, validator=api) + kwargs = kwargs.copy() + if "include_all_files" not in kwargs: + kwargs.update(include_all_files=True) atoms_or_images = sb.convert_to_ase(index=index, **kwargs) return atoms_or_images +# Backward compatibility +read_geopt = read_sparc_geopt + + @deprecated( "Reading individual .aimd file is not recommended. Please use read_sparc instead." ) -def read_aimd(filename, index=-1, **kwargs): +def read_sparc_aimd(filename, index=-1, **kwargs): """Parse a .static file bundle using a wrapper around SparcBundle The reader works only when other files (.ion, .inpt) exist. @@ -933,28 +952,25 @@ def read_aimd(filename, index=-1, **kwargs): parent_dir = Path(filename).parent api = locate_api() sb = SparcBundle(directory=parent_dir, validator=api) + kwargs = kwargs.copy() + if "include_all_files" not in kwargs: + kwargs.update(include_all_files=True) atoms_or_images = sb.convert_to_ase(index=index, **kwargs) return atoms_or_images -def register_ase_io_sparc(name="sparc"): - """ - Monkey patching the ase.io and ase.io.formats - So that the following formats can be used - after `import sparc` +# Backward compatibility +read_aimd = read_sparc_aimd - ``` - from ase.io import sparc - ase.io.read("test.sparc") - atoms.write("test.sparc") - ``` - The register method only aims to work for ase 3.22 - the develope version of ase provides a much more powerful - register mechanism, we can wait. 
+def __register_new_filetype(): + """Register the filetype() function that allows recognizing .sparc as directory + This method should only be called for ase==3.22 compatibility and for ase-gui + In future versions of ase gui where format is supported, this method should be removed """ + import sys + from ase.io import formats as hacked_formats - from ase.io.formats import define_io_format as F from ase.io.formats import filetype as _old_filetype from ase.io.formats import ioformats @@ -972,10 +988,38 @@ def _new_filetype(filename, read=True, guess=True): return "sparc" return _old_filetype(filename, read, guess) + hacked_formats.filetype = _new_filetype + sys.modules["ase.io.formats"] = hacked_formats + return + + +@deprecated( + "register_ase_io_sparc will be deprecated for future releases. Please upgrade ase>=3.23." +) +def register_ase_io_sparc(name="sparc"): + """ + **Legacy register of io-formats for ase==3.22** + **For ase>=3.23, use the package entrypoint registration** + Monkey patching the ase.io and ase.io.formats + So that the following formats can be used + after `import sparc` + + ``` + from ase.io import sparc + ase.io.read("test.sparc") + atoms.write("test.sparc") + ``` + + The register method only aims to work for ase 3.22 + the develope version of ase provides a much more powerful + register mechanism, we can wait. 
+    """
     import sys
     from warnings import warn
 
     import pkg_resources
+    from ase.io.formats import define_io_format as F
+    from ase.io.formats import ioformats
 
     name = name.lower()
     if name in ioformats.keys():
@@ -1001,11 +1045,8 @@ def _new_filetype(filename, read=True, guess=True):
         )
         return
 
-    hacked_formats.filetype = _new_filetype
-
     sys.modules[f"ase.io.{name}"] = _monkey_mod
-    sys.modules["ase.io.formats"] = hacked_formats
-    # sys.modules[f"ase.io.format"] = _monkey_mod
+    __register_new_filetype()
 
     # Step 2: define a new format
     F(
@@ -1066,3 +1107,57 @@ def _new_filetype(filename, read=True, guess=True):
 
     # TODO: remove print options as it may be redundant
     print("Successfully registered sparc formats with ase.io!")
+
+
+# ase>=3.23 uses new ExternalIOFormat as registered entrypoints
+# Please do not use from ase.io.formats import ExternalIOFormat!
+# This causes circular import
+try:
+    from ase.utils.plugins import ExternalIOFormat as EIF
+except ImportError:
+    # Backward Compatibility
+    from typing import List, NamedTuple, Optional, Union
+
+    # Copy definition from 3.23
+    # Name is defined in the entry point
+    class ExternalIOFormat(NamedTuple):
+        desc: str
+        code: str
+        module: Optional[str] = None
+        glob: Optional[Union[str, List[str]]] = None
+        ext: Optional[Union[str, List[str]]] = None
+        magic: Optional[Union[bytes, List[bytes]]] = None
+        magic_regex: Optional[bytes] = None
+
+    EIF = ExternalIOFormat
+
+format_sparc = EIF(
+    desc="SPARC .sparc bundle",
+    module="sparc.io",
+    code="+S",  # read_sparc has multi-image support
+    ext="sparc",
+)
+format_ion = EIF(
+    desc="SPARC .ion file",
+    module="sparc.io",
+    code="1S",
+    ext="ion",
+)
+format_static = EIF(
+    desc="SPARC single point results",
+    module="sparc.io",
+    code="+S",
+    glob=["*.static", "*.static_*"],
+)
+format_geopt = EIF(
+    desc="SPARC geometric optimization results",
+    module="sparc.io",
+    code="+S",
+    glob=["*.geopt", "*.geopt_*"],
+)
+format_aimd = EIF(
+    desc="SPARC AIMD results",
+    module="sparc.io",
+    
code="+S",
+    glob=["*.aimd", "*.aimd_*"],
+)
diff --git a/sparc/psp/README.md b/sparc/psp/README.md
index afc06fd4..eab32965 100644
--- a/sparc/psp/README.md
+++ b/sparc/psp/README.md
@@ -1,4 +1,4 @@
-This is the folder for storing the pseudo potential files with SPARC-X-API. 
-During installation, this folder will be intentially kept empty. 
+This is the folder for storing the pseudo potential files with SPARC-X-API.
+During installation, this folder will be intentionally kept empty.
 User `python -m sparc.download_data` to download the default pseudo potential files after installation.
diff --git a/sparc/socketio.py b/sparc/socketio.py
index 1ea93f2c..e02e2dd0 100644
--- a/sparc/socketio.py
+++ b/sparc/socketio.py
@@ -1,23 +1,22 @@
 """A i-PI compatible socket protocol implemented in SPARC
 """
+import hashlib
+import io
 import os
+import pickle
 import random
 import socket
 import string
-import io
-import pickle
 
 import numpy as np
 from ase.calculators.socketio import (
     IPIProtocol,
     SocketClient,
+    SocketClosed,
     SocketServer,
     actualunixsocketname,
-    SocketClosed,
 )
 
-import hashlib
-
 
 def generate_random_socket_name(prefix="sparc_", length=6):
     """Generate a random socket name with the given prefix and a specified length of random hex characters."""
@@ -40,16 +39,17 @@ def send_string(self, msg, msglen=None):
         return
 
     def send_object(self, obj):
-        """Send an object dumped into pickle
-        """
+        """Send an object dumped into pickle"""
         # We can use the highese protocol since the
         # python requirement >= 3.8
         pkl_bytes = pickle.dumps(obj, protocol=5)
         nbytes = len(pkl_bytes)
         md5_checksum = hashlib.md5(pkl_bytes)
-        checksum_digest, checksum_count = (md5_checksum.digest(),
-                                           md5_checksum.digest_size)
-        self.sendmsg('PKLOBJ')  # To distinguish from other methods like INIT
+        checksum_digest, checksum_count = (
+            md5_checksum.digest(),
+            md5_checksum.digest_size,
+        )
+        self.sendmsg("PKLOBJ")  # To distinguish from other methods like INIT
         self.log(" pickle bytes to send: ", str(nbytes))
         
self.send(nbytes, np.int32) self.log(" sending pickle object....") @@ -67,7 +67,9 @@ def recv_object(self, include_header=True): """ if include_header: msg = self.recvmsg() - assert msg.strip() == "PKLOBJ", f"Incorrect header {msg} received when calling recv_object method! Please contact the developers" + assert ( + msg.strip() == "PKLOBJ" + ), f"Incorrect header {msg} received when calling recv_object method! Please contact the developers" nbytes = int(self.recv(1, np.int32)) self.log(" Will receive pickle object with n-bytes: ", nbytes) bytes_received = self._recvall(nbytes) @@ -76,10 +78,11 @@ def recv_object(self, include_header=True): digest_received = self._recvall(checksum_nbytes) digest_calc = hashlib.md5(bytes_received).digest() minlen = min(len(digest_calc), len(digest_received)) - assert digest_calc[:minlen] == digest_received[:minlen], ("MD5 checksum for the received object does not match!") + assert ( + digest_calc[:minlen] == digest_received[:minlen] + ), "MD5 checksum for the received object does not match!" obj = pickle.loads(bytes_received) return obj - def send_param(self, name, value): """Send a specific param setting to SPARC @@ -105,49 +108,47 @@ def sendinit(self): to the calculator instance. 
The actual behavior regarding how the calculator would be (re)-initialized, dependends on the implementation of recvinit """ - self.log(' New sendinit for SPARC protocol') - self.sendmsg('INIT') + self.log(" New sendinit for SPARC protocol") + self.sendmsg("INIT") self.send(0, np.int32) # fallback msg_chars = [ord(c) for c in "NEWPROTO"] len_msg = len(msg_chars) self.send(len_msg, np.int32) self.send(msg_chars, np.byte) # initialization string return - + def recvinit(self): - """Fallback recvinit method - """ + """Fallback recvinit method""" return super().recvinit() def calculate_new_protocol(self, atoms, params): atoms = atoms.copy() atoms.calc = None - self.log(' calculate with new protocol') + self.log(" calculate with new protocol") msg = self.status() # We don't know how NEEDINIT is supposed to work, but some codes # seem to be okay if we skip it and send the positions instead. - if msg == 'NEEDINIT': + if msg == "NEEDINIT": self.sendinit() self.send_object((atoms, params)) msg = self.status() cell = atoms.get_cell() - positions = atoms.get_positions() # Original order - assert msg == 'READY', msg + positions = atoms.get_positions() # Original order + assert msg == "READY", msg icell = np.linalg.pinv(cell).transpose() self.sendposdata(cell, icell, positions) msg = self.status() - assert msg == 'HAVEDATA', msg + assert msg == "HAVEDATA", msg e, forces, virial, morebytes = self.sendrecv_force() - r = dict(energy=e, - forces=forces, - virial=virial, - morebytes=morebytes) + r = dict(energy=e, forces=forces, virial=virial, morebytes=morebytes) # Additional data (e.g. 
parsed from file output) moredata = self.recv_object() return r, moredata + # TODO: make sure both calc are ok + class SPARCSocketServer(SocketServer): """We only implement the unix socket version due to simplicity @@ -232,9 +233,8 @@ def calculate_new_protocol(self, atoms, params={}): if self.protocol is None: self._accept() return self.protocol.calculate_new_protocol(atoms, params) - - + class SPARCSocketClient(SocketClient): def __init__( self, @@ -256,16 +256,14 @@ def __init__( ) sock = self.protocol.socket self.protocol = SPARCProtocol(sock, txt=log) - self.parent_calc = parent_calc # Track the actual calculator + self.parent_calc = parent_calc # Track the actual calculator # TODO: make sure the client is compatible with the default socketclient - + # We shall make NEEDINIT to be the default state # self.state = "NEEDINIT" - def calculate(self, atoms, use_stress): - """Use the calculator instance - """ + """Use the calculator instance""" if atoms.calc is None: atoms.calc = self.parent_calc return super().calculate(atoms, use_stress) @@ -288,20 +286,22 @@ def irun(self, atoms, use_stress=True): except SocketClosed: # Server closed the connection, but we want to # exit gracefully anyway - msg = 'EXIT' + msg = "EXIT" - if msg == 'EXIT': + if msg == "EXIT": # Send stop signal to clients: self.comm.broadcast(np.ones(1, bool), 0) # (When otherwise exiting, things crashed and we should # let MPI_ABORT take care of the mess instead of trying # to synchronize the exit) return - elif msg == 'STATUS': + elif msg == "STATUS": self.protocol.sendmsg(self.state) - elif msg == 'POSDATA': - assert self.state == 'READY' - assert atoms is not None, "Your SPARCSocketClient isn't properly initialized!" + elif msg == "POSDATA": + assert self.state == "READY" + assert ( + atoms is not None + ), "Your SPARCSocketClient isn't properly initialized!" 
cell, icell, positions = self.protocol.recvposdata() if not discard_posdata: atoms.cell[:] = cell @@ -312,18 +312,18 @@ def irun(self, atoms, use_stress=True): self.comm.broadcast(np.zeros(1, bool), 0) energy, forces, virial = self.calculate(atoms, use_stress) - self.state = 'HAVEDATA' + self.state = "HAVEDATA" yield - elif msg == 'GETFORCE': - assert self.state == 'HAVEDATA', self.state + elif msg == "GETFORCE": + assert self.state == "HAVEDATA", self.state self.protocol.sendforce(energy, forces, virial) if new_protocol: # TODO: implement more raw results raw_results = self.parent_calc.raw_results self.protocol.send_object(raw_results) - self.state = 'NEEDINIT' - elif msg == 'INIT': - assert self.state == 'NEEDINIT' + self.state = "NEEDINIT" + elif msg == "INIT": + assert self.state == "NEEDINIT" # Fall back to the default socketio bead_index, initbytes = self.protocol.recvinit() # The parts below use the new sparc protocol @@ -339,15 +339,14 @@ def irun(self, atoms, use_stress=True): atoms = recv_atoms atoms.calc = self.parent_calc discard_posdata = True - self.state = 'READY' + self.state = "READY" else: - raise KeyError('Bad message', msg) + raise KeyError("Bad message", msg) finally: self.close() - + def run(self, atoms=None, use_stress=False): - """Socket mode in SPARC should allow arbitrary start - """ + """Socket mode in SPARC should allow arbitrary start""" # As a default we shall start the SPARCSocketIO always in needinit mode if atoms is None: self.state = "NEEDINIT" diff --git a/sparc/sparc_parsers/pseudopotential.py b/sparc/sparc_parsers/pseudopotential.py index 1f510111..ded1420f 100644 --- a/sparc/sparc_parsers/pseudopotential.py +++ b/sparc/sparc_parsers/pseudopotential.py @@ -3,7 +3,7 @@ The psp8 format is defined in abinit manual https://docs.abinit.org/developers/psp8_info/ -The first +The first """ diff --git a/tests/test_000_ase_io.py b/tests/test_000_ase_io.py index 32f76171..6b00f1a5 100644 --- a/tests/test_000_ase_io.py +++ 
b/tests/test_000_ase_io.py @@ -15,6 +15,10 @@ def test_import_order1(): """import ase before sparc""" import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") from ase.io.formats import ioformats assert "sparc" not in ioformats.keys() @@ -30,6 +34,10 @@ def test_import_order1(): def test_import_order2(): """import ase after sparc""" import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") from ase.io.formats import ioformats import sparc @@ -45,6 +53,11 @@ def test_sparc_fake_write_exp(monkeypatch): """Baseline test. Make a fake write_sparc method to makesure the sparc.write_sparc works """ + import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") def fake_write_sparc(filename, atoms, **kwargs): print("I'm the fake writer") @@ -69,6 +82,11 @@ def test_sparc_fake_write(monkeypatch): """Baseline test. 
Make a fake write_sparc method to makesure the ase.io register works """ + import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") def fake_write_sparc(filename, atoms, **kwargs): print("I'm the fake writer") @@ -95,6 +113,12 @@ def test_sparc_fake_read_exp(monkeypatch, fs): """ from pathlib import Path + import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") + from ase.io import sparc as _sparc import sparc @@ -125,6 +149,12 @@ def test_sparc_fake_read(monkeypatch, fs): """ from pathlib import Path + import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") + from ase.io import sparc as _sparc import sparc @@ -154,6 +184,12 @@ def test_sparc_read_auto(monkeypatch, fs): """Same version of the fake read but with automatic format discover""" from pathlib import Path + import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") + from ase.io import sparc as _sparc import sparc @@ -184,6 +220,11 @@ def test_ase_io_filetype(fs): Due to the implementation of ase.io.formats, single file tests should be done on non-empty files """ + import ase + from packaging import version + + if version.parse(ase.__version__) >= version.parse("3.23"): + pytest.skip("") from ase.io.formats import filetype import sparc diff --git a/tests/test_api_version.py b/tests/test_api_version.py index 4e267a08..5a282010 100644 --- a/tests/test_api_version.py +++ b/tests/test_api_version.py @@ -10,6 +10,7 @@ def test_sparc_api(monkeypatch): from sparc.api import SparcAPI from sparc.utils import locate_api + monkeypatch.delenv("SPARC_DOC_PATH", raising=False) default_ver = SparcAPI().sparc_version # No location provided, use default version diff --git a/tests/test_docparser.py b/tests/test_docparser.py index b5fb8bdb..b3173b81 100644 
--- a/tests/test_docparser.py +++ b/tests/test_docparser.py @@ -70,7 +70,7 @@ def test_version_parser(fs, monkeypatch): time(¤t_time); char *c_time_str = ctime(¤t_time); // ctime includes a newline char '\n', remove manually - if (c_time_str[strlen(c_time_str)-1] == '\n') + if (c_time_str[strlen(c_time_str)-1] == '\n') c_time_str[strlen(c_time_str)-1] = '\0'; FILE *output_fp = fopen(pSPARC->OutFilename,"w"); diff --git a/tests/test_gui.py b/tests/test_gui.py index 3032fff3..73f43b3e 100644 --- a/tests/test_gui.py +++ b/tests/test_gui.py @@ -17,9 +17,15 @@ def test_cli(): def test_info(): - """Call the sparc-ase info on sparc file""" + """Call the sparc-ase info on sparc file. Be aware of the API changes in 3.22->3.23""" + import ase + from packaging import version + bundle = test_output_dir / "Cu_FCC.sparc" - proc = run(["sparc-ase", "info", f"{bundle}"], capture_output=True) + if version.parse(ase.__version__) < version.parse("3.23"): + proc = run(["sparc-ase", "info", f"{bundle}"], capture_output=True) + else: + proc = run(["sparc-ase", "info", "--files", f"{bundle}"], capture_output=True) assert proc.returncode == 0 assert "SPARC" in proc.stdout.decode("utf8") diff --git a/tests/test_output_parser.py b/tests/test_output_parser.py index fb40e67a..c9754149 100644 --- a/tests/test_output_parser.py +++ b/tests/test_output_parser.py @@ -19,7 +19,7 @@ def test_output_date_parser(): * Distributed under GNU General Public License 3 (GPL) * * Start time: Sun Feb 5 13:39:04 2023 * *************************************************************************** - Input parameters + Input parameters ***************************************************************************""" assert _read_sparc_version(header1) == "2023.02.03" header2 = """*************************************************************************** @@ -28,7 +28,7 @@ def test_output_date_parser(): * Distributed under GNU General Public License 3 (GPL) * * Start time: Sun Feb 5 13:39:04 2023 * 
*************************************************************************** - Input parameters + Input parameters ***************************************************************************""" assert _read_sparc_version(header2) == "2023.06.24" @@ -69,13 +69,13 @@ def test_incomplete_output(): * Distributed under GNU General Public License 3 (GPL) * * Start time: Fri Jan 19 08:30:23 2024 * *************************************************************************** - Input parameters + Input parameters *************************************************************************** -LATVEC_SCALE: 1 1 1 +LATVEC_SCALE: 1 1 1 LATVEC: -22.676713510043140 0.000000000000000 0.000000000000000 -0.000000000000000 25.561338867158440 0.000000000000000 -0.000000000000000 0.000000000000000 23.803574206414829 +22.676713510043140 0.000000000000000 0.000000000000000 +0.000000000000000 25.561338867158440 0.000000000000000 +0.000000000000000 0.000000000000000 23.803574206414829 FD_GRID: 43 49 45 FD_ORDER: 12 BC: P P P @@ -118,23 +118,23 @@ def test_incomplete_output(): PRINT_ENERGY_DENSITY: 0 OUTPUT_FILE: SPARC *************************************************************************** - Socket Mode + Socket Mode *************************************************************************** SOCKET_HOST: localhost SOCKET_PORT: 12345 SOCKET_INET: 1 SOCKET_MAX_NITER: 10000 *************************************************************************** - Cell + Cell *************************************************************************** Lattice vectors (Bohr): -22.676713510043140 0.000000000000000 0.000000000000000 -0.000000000000000 25.561338867158440 0.000000000000000 -0.000000000000000 0.000000000000000 23.803574206414829 +22.676713510043140 0.000000000000000 0.000000000000000 +0.000000000000000 25.561338867158440 0.000000000000000 +0.000000000000000 0.000000000000000 23.803574206414829 Volume: 1.3797674149E+04 (Bohr^3) Density: 1.3056802042E-03 (amu/Bohr^3), 1.4631286628E-02 (g/cc) 
*************************************************************************** - Parallelization + Parallelization *************************************************************************** NP_SPIN_PARAL: 1 NP_KPOINT_PARAL: 1 @@ -143,7 +143,7 @@ def test_incomplete_output(): NP_DOMAIN_PHI_PARAL: 1 1 1 EIG_SERIAL_MAXNS: 1500 *************************************************************************** - Initialization + Initialization *************************************************************************** Number of processors : 1 Mesh spacing in x-direction : 0.527365 (Bohr) @@ -167,7 +167,7 @@ def test_incomplete_output(): Estimated total memory usage : 47.35 MB Estimated memory per processor : 47.35 MB =================================================================== - Self Consistent Field (SCF#1) + Self Consistent Field (SCF#1) =================================================================== Iteration Free Energy (Ha/atom) SCF Error Timing (sec) 1 -6.0722904791E+00 3.403E-01 0.631 @@ -193,9 +193,9 @@ def test_incomplete_output(): 21 -6.0306578714E+00 2.154E-03 0.205 22 -6.0306566894E+00 1.092E-03 0.197 23 -6.0306580555E+00 4.574E-04 0.197 -Total number of SCF: 23 +Total number of SCF: 23 ==================================================================== - Energy and force calculation + Energy and force calculation ==================================================================== Free energy per atom : -6.0306580555E+00 (Ha/atom) Total free energy : -1.8091974166E+01 (Ha) @@ -211,7 +211,7 @@ def test_incomplete_output(): Maximum stress : 5.1281197310E+00 (GPa) Time for stress calculation : 0.022 (sec) =================================================================== - Self Consistent Field (SCF#2) + Self Consistent Field (SCF#2) =================================================================== Iteration Free Energy (Ha/atom) SCF Error Timing (sec) 1 -6.0403347565E+00 2.197E-01 0.303 diff --git a/tests/test_socket.py b/tests/test_socket.py index 
7df338bd..0b4278f2 100644 --- a/tests/test_socket.py +++ b/tests/test_socket.py @@ -19,13 +19,13 @@ The filename shared by .inpt file and .ion file (without extension) -OPTIONS: +OPTIONS: -h, --help Display help (from command line). -n -c -a - -socket + -socket can be either : or :UNIX. Note: socket (driver) mode is an experimental feature. @@ -36,8 +36,8 @@ The example command runs sparc with 8 cores, with input file named test.inpt, and ion file named test.ion. -NOTE: - This is a short description of the usage of SPARC. For a detailed +NOTE: + This is a short description of the usage of SPARC. For a detailed discription, refer to the manual online at https://github.com/SPARC-X/SPARC/tree/master/doc @@ -54,7 +54,7 @@ The filename shared by .inpt file and .ion file (without extension) -OPTIONS: +OPTIONS: -h, --help Display help (from command line). -n @@ -68,11 +68,11 @@ The example command runs sparc with 8 cores, with input file named test.inpt, and ion file named test.ion. -NOTE: - This is a short description of the usage of SPARC. For a detailed +NOTE: + This is a short description of the usage of SPARC. 
For a detailed discription, refer to the manual online at - https://github.com/SPARC-X/SPARC/tree/master/doc + https://github.com/SPARC-X/SPARC/tree/master/doc """ diff --git a/tests/test_sparc_bundle.py b/tests/test_sparc_bundle.py index 71716756..a88769a1 100644 --- a/tests/test_sparc_bundle.py +++ b/tests/test_sparc_bundle.py @@ -211,7 +211,7 @@ def test_bundle_write_multi(): from sparc.io import read_sparc, write_sparc with tempfile.TemporaryDirectory() as tmpdir: - tmpdir = Path(tmpdir) + tmpdir = Path("test-111") testbundle = tmpdir / "test.sparc" os.makedirs(testbundle, exist_ok=True) atoms = bulk("Cu") * [4, 4, 4] diff --git a/tests/test_static_parser.py b/tests/test_static_parser.py index 7f7da38b..196e704e 100644 --- a/tests/test_static_parser.py +++ b/tests/test_static_parser.py @@ -21,7 +21,7 @@ def test_static_parser(): with open(static_file, "w") as fd: fd.write( """*************************************************************************** - Atom positions + Atom positions *************************************************************************** Fractional coordinates of Fe: 0.0000000000 0.0000000000 0.0000000000 @@ -30,9 +30,9 @@ def test_static_parser(): Atomic forces (Ha/Bohr): 8.0738249305E-01 3.7399117306E-01 -3.5796157735E-01 -8.0738249305E-01 -3.7399117306E-01 3.5796157735E-01 -Stress (GPa): - -2.1918863425E+04 1.3932450782E+03 -5.1023512490E+01 - 1.3932450782E+03 -2.1975897437E+04 -1.2676410947E+02 +Stress (GPa): + -2.1918863425E+04 1.3932450782E+03 -5.1023512490E+01 + 1.3932450782E+03 -2.1975897437E+04 -1.2676410947E+02 -5.1023512490E+01 -1.2676410947E+02 -2.2380745784E+04 """ ) @@ -58,7 +58,7 @@ def test_static_parser_missing_fields(): with open(static_file, "w") as fd: fd.write( """*************************************************************************** - Atom positions + Atom positions *************************************************************************** Fractional coordinates of Al: 0.0000000000 0.0000000000 0.0000000000 @@ 
-127,7 +127,7 @@ def test_static_multi_image_same_cell(): with open(static_file, "w") as fd: fd.write( """*************************************************************************** - Atom positions (socket step 1) + Atom positions (socket step 1) *************************************************************************** Fractional coordinates of Al: 0.0245290938 0.9931721333 0.0319846198 @@ -135,21 +135,21 @@ def test_static_multi_image_same_cell(): 0.5779858173 0.0378980123 0.4768160790 0.5267930889 0.4771151753 0.9770010000 Lattice (Bohr): - 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 - 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 + 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 Total free energy (Ha): -9.043005588833887E+00 Atomic forces (Ha/Bohr): 4.5944553076E-03 -2.9203865816E-04 -9.9557951003E-03 -2.3316843645E-03 5.5280429722E-03 4.4777314303E-03 -4.0851154286E-03 -1.3853146416E-02 5.0853960964E-03 1.8223444855E-03 8.6171421021E-03 3.9266757367E-04 -Stress (GPa): - -2.1676825726E+01 -2.6949376288E-01 8.3753040021E-01 - -2.6949376288E-01 1.0418218149E+01 4.4819979749E-03 +Stress (GPa): + -2.1676825726E+01 -2.6949376288E-01 8.3753040021E-01 + -2.6949376288E-01 1.0418218149E+01 4.4819979749E-03 8.3753040021E-01 4.4819979749E-03 -2.5633685934E+01 *************************************************************************** - Atom positions (socket step 2) + Atom positions (socket step 2) *************************************************************************** Fractional coordinates of Al: 0.0246648938 0.9931635012 0.0316903531 @@ -157,21 +157,21 @@ def test_static_multi_image_same_cell(): 0.5778650741 0.0374885506 0.4769663901 0.5268469531 0.4773698741 0.9770126049 Lattice (Bohr): - 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 
- 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 + 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 Total free energy (Ha): -9.043161352197208E+00 Atomic forces (Ha/Bohr): 4.3923550661E-03 -2.4509690104E-04 -9.8419548499E-03 -2.2709031436E-03 5.4167018956E-03 4.4134407123E-03 -3.9970707817E-03 -1.3709267553E-02 4.9648744789E-03 1.8756188591E-03 8.5376625582E-03 4.6363965864E-04 -Stress (GPa): - -2.1692356285E+01 -2.6724550796E-01 8.2319727453E-01 - -2.6724550796E-01 1.0434880328E+01 3.2822223336E-03 +Stress (GPa): + -2.1692356285E+01 -2.6724550796E-01 8.2319727453E-01 + -2.6724550796E-01 1.0434880328E+01 3.2822223336E-03 8.2319727453E-01 3.2822223336E-03 -2.5491008099E+01 *************************************************************************** - Atom positions (socket step 3) + Atom positions (socket step 3) *************************************************************************** Fractional coordinates of Al: 0.0294363605 0.9928972494 0.0209989185 @@ -179,18 +179,18 @@ def test_static_multi_image_same_cell(): 0.5735230074 0.0225960074 0.4823597926 0.5288844593 0.4866444395 0.9775162642 Lattice (Bohr): - 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 - 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 + 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 Total free energy (Ha): -9.047001594631228E+00 Atomic forces (Ha/Bohr): 2.4156119100E-03 1.2164081954E-03 -6.0736428757E-03 -2.4501250186E-03 2.5307557367E-03 2.0646849306E-03 -2.2623980616E-03 -8.4907372223E-03 2.2296183039E-03 2.2969111703E-03 4.7435732902E-03 1.7793396413E-03 -Stress (GPa): - -2.0388752465E+01 -1.3826995208E-01 3.3893431251E-01 - -1.3826995208E-01 1.0147311461E+01 -6.6073109432E-03 +Stress (GPa): + -2.0388752465E+01 
-1.3826995208E-01 3.3893431251E-01 + -1.3826995208E-01 1.0147311461E+01 -6.6073109432E-03 3.3893431251E-01 -6.6073109432E-03 -2.2513420091E+01 """ ) @@ -225,7 +225,7 @@ def test_static_multi_image_diff_cell(): with open(static_file, "w") as fd: fd.write( """*************************************************************************** - Atom positions (socket step 1) + Atom positions (socket step 1) *************************************************************************** Fractional coordinates of Al: 0.0435568480 0.0098804249 0.0241663700 @@ -233,21 +233,21 @@ def test_static_multi_image_diff_cell(): 0.5234589733 -0.0037372150 0.4974513864 0.5101382346 0.5035566314 0.0359079878 Lattice (Bohr): - 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 - 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 + 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 Total free energy (Ha): -9.041897494954259E+00 Atomic forces (Ha/Bohr): -6.3263660275E-03 4.1224438525E-03 -9.7573141360E-03 -1.1198940824E-02 -9.4391851504E-03 1.3724648200E-02 7.1551844744E-03 6.8108725330E-03 8.1322757442E-03 1.0370122377E-02 -1.4941312350E-03 -1.2099609809E-02 -Stress (GPa): - -1.2943266427E+01 -7.3004908329E-01 1.0042725753E+00 - -7.3004908329E-01 -1.3976465331E+01 5.9432478434E-01 +Stress (GPa): + -1.2943266427E+01 -7.3004908329E-01 1.0042725753E+00 + -7.3004908329E-01 -1.3976465331E+01 5.9432478434E-01 1.0042725753E+00 5.9432478434E-01 -8.2525496305E+00 *************************************************************************** - Atom positions (socket step 2) + Atom positions (socket step 2) *************************************************************************** Fractional coordinates of Al: 0.0401072929 -0.0151050966 -0.0130412778 @@ -255,21 +255,21 @@ def test_static_multi_image_diff_cell(): 0.5430817720 -0.0187952321 0.5078775085 
0.4938427068 0.5361014305 -0.0508676718 Lattice (Bohr): - 7.7299247177E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.7299247177E+00 0.0000000000E+00 - 0.0000000000E+00 0.0000000000E+00 7.7299247177E+00 + 7.7299247177E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.7299247177E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.7299247177E+00 Total free energy (Ha): -9.043789471327491E+00 Atomic forces (Ha/Bohr): -4.5160329928E-03 1.0836654669E-02 -2.0034847850E-03 7.8472069094E-03 -1.0356602905E-02 4.6601556005E-03 -5.6452194926E-03 1.4120642271E-02 -9.1708495469E-03 2.3140455760E-03 -1.4600694035E-02 6.5141787315E-03 -Stress (GPa): - -7.3542604262E+00 1.8630500028E+00 -7.3044231388E-01 - 1.8630500028E+00 -2.2936813817E+01 1.6489555596E+00 +Stress (GPa): + -7.3542604262E+00 1.8630500028E+00 -7.3044231388E-01 + 1.8630500028E+00 -2.2936813817E+01 1.6489555596E+00 -7.3044231388E-01 1.6489555596E+00 -9.3987769886E-01 *************************************************************************** - Atom positions (socket step 3) + Atom positions (socket step 3) *************************************************************************** Fractional coordinates of Al: -0.0102903172 -0.0013893044 -0.0527455826 @@ -277,18 +277,18 @@ def test_static_multi_image_diff_cell(): 0.5124168251 -0.0307478540 0.4738777230 0.4775553675 0.5136161492 0.0565977287 Lattice (Bohr): - 7.8064586258E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.8064586258E+00 0.0000000000E+00 - 0.0000000000E+00 0.0000000000E+00 7.8064586258E+00 + 7.8064586258E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.8064586258E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.8064586258E+00 Total free energy (Ha): -9.049938894899322E+00 Atomic forces (Ha/Bohr): 3.6109364340E-03 -2.6690542034E-03 -4.0562261364E-03 -1.3775701683E-02 4.3698977978E-03 5.4623877892E-03 2.2823680670E-03 5.1324965010E-03 4.1411554263E-03 7.8823971824E-03 -6.8333400954E-03 
-5.5473170791E-03 -Stress (GPa): - 5.3653952236E+00 5.5029845435E-01 1.4612463857E+00 - 5.5029845435E-01 8.7131123988E-01 -8.1756988882E-01 +Stress (GPa): + 5.3653952236E+00 5.5029845435E-01 1.4612463857E+00 + 5.5029845435E-01 8.7131123988E-01 -8.1756988882E-01 1.4612463857E+00 -8.1756988882E-01 -3.3530577155E+01 """ ) @@ -320,7 +320,7 @@ def test_static_incomplete(): with tempfile.TemporaryDirectory() as tmpdir: tmpdir = Path(tmpdir) content = """*************************************************************************** - Atom positions (socket step 1) + Atom positions (socket step 1) *************************************************************************** Fractional coordinates of Al: 0.0435568469 0.0098804247 0.0241663704 @@ -328,21 +328,21 @@ def test_static_incomplete(): 0.5234589728 -0.0037372148 0.4974513852 0.5101382346 0.5035566321 0.0359079877 Lattice (Bohr): - 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 - 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 + 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 Total free energy (Ha): -9.041897494881022E+00 Atomic forces (Ha/Bohr): -6.3263659031E-03 4.1224440526E-03 -9.7573144850E-03 -1.1198940888E-02 -9.4391853227E-03 1.3724648407E-02 7.1551844772E-03 6.8108726360E-03 8.1322760704E-03 1.0370122314E-02 -1.4941313660E-03 -1.2099609992E-02 -Stress (GPa): - -1.2943266261E+01 -7.3004908368E-01 1.0042725771E+00 - -7.3004908368E-01 -1.3976465176E+01 5.9432479567E-01 +Stress (GPa): + -1.2943266261E+01 -7.3004908368E-01 1.0042725771E+00 + -7.3004908368E-01 -1.3976465176E+01 5.9432479567E-01 1.0042725771E+00 5.9432479567E-01 -8.2525500281E+00 *************************************************************************** - Atom positions (socket step 2) + Atom positions (socket step 2) 
*************************************************************************** Fractional coordinates of Al: 0.0401072938 -0.0151050963 -0.0130412790 @@ -350,21 +350,21 @@ def test_static_incomplete(): 0.5430817728 -0.0187952321 0.5078775086 0.4938427062 0.5361014296 -0.0508676716 Lattice (Bohr): - 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 - 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 + 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 + 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 Total free energy (Ha): -9.039359835390405E+00 Atomic forces (Ha/Bohr): -5.6420435958E-03 1.2313126326E-02 -2.8998717685E-03 9.5779301732E-03 -1.1726647745E-02 5.8218036947E-03 -7.0526004868E-03 1.5921660989E-02 -1.0900664252E-02 3.1167139094E-03 -1.6508139571E-02 7.9787323255E-03 -Stress (GPa): - -1.1079730116E+01 2.1369400165E+00 -5.5506801999E-01 - 2.1369400165E+00 -2.4342715311E+01 1.9045396297E+00 +Stress (GPa): + -1.1079730116E+01 2.1369400165E+00 -5.5506801999E-01 + 2.1369400165E+00 -2.4342715311E+01 1.9045396297E+00 -5.5506801999E-01 1.9045396297E+00 -3.7041195294E+00 *************************************************************************** - Atom positions (socket step 3) + Atom positions (socket step 3) *************************************************************************** Fractional coordinates of Al: -0.0102903160 -0.0013893037 -0.0527455827 @@ -372,8 +372,8 @@ def test_static_incomplete(): 0.5124168247 -0.0307478543 0.4738777235 0.4775553679 0.5136161481 0.0565977284 Lattice (Bohr): - 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 - 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 + 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 + 0.0000000000E+00 7.6533908096E+00 0.0000000000E+00 0.0000000000E+00 0.0000000000E+00 7.6533908096E+00 """ static_file = tmpdir / "test.static"