
Merge pull request #23 from alchem0x2A/pbc
Pbc
alchem0x2A authored Sep 14, 2023
2 parents e146aee + 45ed83a commit 5c4a6e7
Showing 46 changed files with 3,277 additions and 342 deletions.
4 changes: 1 addition & 3 deletions examples/ex0-eos.py
@@ -15,9 +15,7 @@ def main():
calc = SPARC(h=0.25, kpts=(3, 3, 3), xc="pbe", directory="ex0-eos")
vol = atoms.get_volume()
atoms.calc = calc
eos = calculate_eos(
atoms, npoints=5, eps=0.05, trajectory="al-eos-sparc.traj"
)
eos = calculate_eos(atoms, npoints=5, eps=0.05, trajectory="al-eos-sparc.traj")
print("Original volume: Ang^3", vol)
v, e, B = eos.fit()
print("Fitted volume (Ang^3), energy (eV), modulus (eV/Ang^3)")
2 changes: 1 addition & 1 deletion examples/ex1-ase-optimize.py
@@ -50,7 +50,7 @@ def optimize_ase_lbfgs():
)
atoms.calc = calc
opt = BFGS(atoms)
#breakpoint()
# breakpoint()
opt.run(fmax=0.02)
e_fin = atoms.get_potential_energy()
f_fin = atoms.get_forces()
46 changes: 34 additions & 12 deletions sparc/api.py
@@ -30,8 +30,7 @@ def get_parameter_dict(self, parameter):
parameter = parameter.upper()
if parameter not in self.parameters.keys():
raise KeyError(
f"Parameter {parameter} is not known to "
f"SPARC {self.sparc_version}!"
f"Parameter {parameter} is not known to " f"SPARC {self.sparc_version}!"
)
return self.parameters[parameter]

@@ -90,8 +89,13 @@ def validate_input(self, parameter, input):
float(input)
return True
except (TypeError, ValueError):
return False
try:
float(input.split()[0])
return True
except Exception:
return False
elif "array" in dtype:
# import pdb; pdb.set_trace()
if is_input_string:
if ("." in input) and ("integer" in dtype):
warn(
@@ -103,15 +107,16 @@
)
)
try:
arr = np.genfromtxt(input.splitlines(), dtype=float)
# import pdb; pdb.set_trace()
arr = np.genfromtxt(input.splitlines(), dtype=float, ndmin=1)
# In valid input with nan
if np.isnan(arr).any():
arr = np.array(0.0)
except Exception:
arr = np.array(0.0)
else:
try:
arr = np.asarray(input)
arr = np.atleast_1d(np.asarray(input))
if (arr.dtype not in (int, bool)) and ("integer" in dtype):
warn(
(
@@ -124,6 +129,9 @@
except Exception:
arr = np.array(0.0)
return len(arr.shape) > 0
# elif dtype == "other":
# # Any "other"-type inputs should be provided only using string
# return is_input_string
else:
raise ValueError(f"Data type {dtype} is not supported!")

@@ -132,6 +140,8 @@ def convert_string_to_value(self, parameter, string):

# Special case, the string may be a multiline string-array!
if isinstance(string, list):
# Make sure there is a line break at the end, for cases like ["2."]
string.append("")
string = [s.strip() for s in string]
string = "\n".join(string)

@@ -153,15 +163,25 @@ def convert_string_to_value(self, parameter, string):
if allow_bool_input:
value = bool(value)
elif dtype == "double":
value = float(string)
# Some inputs, like TARGET_PRESSURE, may be accepted with a unit
# like 0.0 GPa. Only accept the first part
try:
value = float(string)
except ValueError as e:
try:
value = float(string.split()[0])
except Exception:
raise e
elif dtype == "integer array":
value = np.genfromtxt(string.splitlines(), dtype=int)
value = np.genfromtxt(string.splitlines(), dtype=int, ndmin=1)
if allow_bool_input:
value = value.astype(bool)
elif dtype == "double array":
value = np.genfromtxt(string.splitlines(), dtype=float)
else:
value = np.genfromtxt(string.splitlines(), dtype=float, ndmin=1)
elif dtype == "other":
value = string
# should not happen since validate_input has gatekeeping
else:
raise ValueError(f"Unsupported type {dtype}")

return value
@@ -190,6 +210,10 @@ def convert_value_to_string(self, parameter, value):
string = "{:.14f}".format(float(value))
elif dtype in ("integer array", "double array"):
string = _array_to_string(value, dtype)
elif dtype == "other":
if not is_input_string:
raise ValueError("Only support string value when datatype is other")
string = value
else:
# should not happen since validate_input has gatekeeping
raise ValueError(f"Unsupported type {dtype}")
@@ -206,9 +230,7 @@ def _array_to_string(arr, format):
fmt = "%d"
elif format in ("double array", "double"):
fmt = "%.14f"
np.savetxt(
buf, arr, delimiter=" ", fmt=fmt, header="", footer="", newline="\n"
)
np.savetxt(buf, arr, delimiter=" ", fmt=fmt, header="", footer="", newline="\n")
# Return the string output of the buffer with
# whitespaces removed
return buf.getvalue().strip()
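
A note on the `sparc/api.py` changes above: `validate_input` and `convert_string_to_value` now tolerate scalar `double` inputs that carry a trailing unit (e.g. `TARGET_PRESSURE: 0.0 GPa`) by falling back to the first whitespace-separated token, and array parsing uses `ndmin=1` / `np.atleast_1d` so a single value such as `"2."` still comes back as a 1-D array instead of a 0-d scalar. A minimal sketch of the same two patterns, written as hypothetical standalone helpers rather than the actual `SparcAPI` methods (note that the `ndmin` keyword of `np.genfromtxt` requires a recent NumPy):

```python
import numpy as np

def parse_double(raw):
    """Parse a SPARC 'double' field that may carry a unit, e.g. '0.0 GPa'."""
    try:
        return float(raw)
    except (TypeError, ValueError):
        # Keep only the leading numeric token, mirroring the fallback in the diff
        return float(str(raw).split()[0])

def parse_double_array(raw):
    """Parse a 'double array' field; ndmin=1 keeps single values 1-D."""
    return np.genfromtxt(str(raw).splitlines(), dtype=float, ndmin=1)

print(parse_double("0.0 GPa"))         # 0.0
print(parse_double_array("2."))        # [2.]
print(parse_double_array("1. 2. 3."))  # [1. 2. 3.]
```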
78 changes: 29 additions & 49 deletions sparc/calculator.py
@@ -177,9 +177,7 @@ def check_state(self, atoms, tol=1e-9):
# A few hard-written rules. Wrapping should only affect the position
if "positions" in system_changes:
atoms_copy.wrap()
new_system_changes = FileIOCalculator.check_state(
self, atoms_copy, tol=tol
)
new_system_changes = FileIOCalculator.check_state(self, atoms_copy, tol=tol)
# TODO: make sure such check only happens for PBC
# the position is wrapped, accept as the same structure
if "positions" not in new_system_changes:
@@ -220,16 +218,16 @@ def _make_command(self, extras=""):
self.command = command_env
return f"{self.command} {extras}"

def calculate(
self, atoms=None, properties=["energy"], system_changes=all_changes
):
def calculate(self, atoms=None, properties=["energy"], system_changes=all_changes):
"""Perform a calculation step"""
# For v1.0.0, we'll only allow pbc=True to make ourselves easier
# TODO: need to have more flexible support for pbc types and check_state
if not all(atoms.pbc):
raise NotImplementedError(
"Non-pbc atoms input has not been tested in the api. Please use pbc=True for now."
)
# Check if the user accidentally provides atoms unit cell without vacuum

if atoms and np.any(atoms.cell.cellpar()[:3] == 0):
# TODO: choose a better error name
msg = "Cannot setup SPARC calculation because at least one of the lattice dimension is zero!"
if any([bc_ is False for bc_ in atoms.pbc]):
msg += " Please add a vacuum in the non-periodic direction of your input structure."
raise ValueError(msg)
Calculator.calculate(self, atoms, properties, system_changes)
self.write_input(self.atoms, properties, system_changes)
self.execute()
@@ -248,7 +246,15 @@ def calculate(
self.atoms.get_initial_magnetic_moments()
)

# atoms = self.atoms.copy()
def get_stress(self, atoms=None):
"""Warn user the dimensionality change when using stress"""
if "stress_equiv" in self.results:
raise NotImplementedError(
"You're requesting stress in a low-dimensional system. Please use `calc.results['stress_equiv']` instead!"
)
return super().get_stress(atoms)

# atoms = self.atoms.copy()

# def update_atoms(self, atoms):
# """Update atoms after calculation if the positions are changed
@@ -284,9 +290,7 @@ def _check_input_exclusion(self, input_parameters, atoms=None):
)

# Rule 2: LATVEC_SCALE, CELL
if ("LATVEC_SCALE" in input_parameters) and (
"CELL" in input_parameters
):
if ("LATVEC_SCALE" in input_parameters) and ("CELL" in input_parameters):
# TODO: change to ExclusionParameterError
raise ValueError(
"LATVEC_SCALE and CELL cannot be specified simultaneously!"
@@ -296,12 +300,7 @@ def _check_input_exclusion(self, input_parameters, atoms=None):
# LATVEC, LATVEC_SCALE or CELL
# TODO: make sure the rule makes sense for molecules
if atoms is not None:
if any(
[
p in input_parameters
for p in ["LATVEC", "LATVEC_SCALE", "CELL"]
]
):
if any([p in input_parameters for p in ["LATVEC", "LATVEC_SCALE", "CELL"]]):
raise ValueError(
"When passing an ase atoms object, LATVEC, LATVEC_SCALE or CELL cannot be set simultaneously!"
)
@@ -317,10 +316,7 @@ def _check_minimal_input(self, input_parameters):
raise ValueError(f"Parameter {param} is not provided.")
# At least one from ECUT, MESH_SPACING and FD_GRID must be provided
if not any(
[
param in input_parameters
for param in ("ECUT", "MESH_SPACING", "FD_GRID")
]
[param in input_parameters for param in ("ECUT", "MESH_SPACING", "FD_GRID")]
):
raise ValueError(
"You should provide at least one of ECUT, MESH_SPACING or FD_GRID."
@@ -395,10 +391,7 @@ def execute(self):
errorcode = self.proc.returncode

if errorcode > 0:
msg = (
f"SPARC failed with command {command}"
f"with error code {errorcode}"
)
msg = f"SPARC failed with command {command}" f"with error code {errorcode}"
raise RuntimeError(msg)

return
@@ -416,9 +409,7 @@ def read_results(self):
"""Parse from the SparcBundle"""
# TODO: try use cache?
# self.sparc_bundle.read_raw_results()
last = self.sparc_bundle.convert_to_ase(
indices=-1, include_all_files=False
)
last = self.sparc_bundle.convert_to_ase(indices=-1, include_all_files=False)
self.atoms = last.copy()
self.results.update(last.calc.results)

@@ -517,10 +508,7 @@ def _convert_special_params(self, atoms=None):
"Must have an active atoms object to convert h --> gpts!"
)
if any(
[
p in self.valid_params
for p in ("FD_GRID", "ECUT", "MESH_SPACING")
]
[p in self.valid_params for p in ("FD_GRID", "ECUT", "MESH_SPACING")]
):
warn(
"You have specified one of FD_GRID, ECUT or MESH_SPACING, "
@@ -538,9 +526,7 @@ def _convert_special_params(self, atoms=None):
converted_sparc_params["FD_GRID"] = gpts
else:
# TODO: customize error
raise ValueError(
f"Input parameter gpts has invalid value {gpts}"
)
raise ValueError(f"Input parameter gpts has invalid value {gpts}")

# kpts
if "kpts" in params:
@@ -550,9 +536,7 @@
converted_sparc_params["KPOINT_GRID"] = kpts
else:
# TODO: customize error
raise ValueError(
f"Input parameter kpts has invalid value {kpts}"
)
raise ValueError(f"Input parameter kpts has invalid value {kpts}")

# nbands
if "nbands" in params:
@@ -563,9 +547,7 @@
converted_sparc_params["NSTATES"] = nbands
else:
# TODO: customize error
raise ValueError(
f"Input parameter nbands has invalid value {nbands}"
)
raise ValueError(f"Input parameter nbands has invalid value {nbands}")

# convergence is a dict
if "convergence" in params:
@@ -620,9 +602,7 @@ def interpret_grid_input(self, atoms, **kwargs):
def interpret_kpoint_input(self, atoms, **kwargs):
return None

@deprecated(
"Please use SPARC.set instead for setting downsampling parameter"
)
@deprecated("Please use SPARC.set instead for setting downsampling parameter")
def interpret_downsampling_input(self, atoms, **kwargs):
return None

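
A note on the `sparc/calculator.py` changes above: the hard `pbc=True` requirement is replaced by a check that no lattice dimension is zero (suggesting a vacuum along non-periodic directions), and `get_stress` now raises for low-dimensional systems, pointing users to `calc.results['stress_equiv']` instead. A minimal sketch of how the new cell check behaves, rewritten as a hypothetical standalone function (the real check lives inside `SPARC.calculate`):

```python
import numpy as np
from ase.build import bulk, molecule

def check_cell_for_sparc(atoms):
    """Hypothetical standalone version of the cell check added in calculate()."""
    if np.any(atoms.cell.cellpar()[:3] == 0):
        msg = ("Cannot setup SPARC calculation because at least one "
               "of the lattice dimensions is zero!")
        if not all(atoms.pbc):
            msg += (" Please add a vacuum in the non-periodic "
                    "direction of your input structure.")
        raise ValueError(msg)

check_cell_for_sparc(bulk("Al", cubic=True))  # bulk crystal: passes

h2o = molecule("H2O")                         # no cell at all: rejected
try:
    check_cell_for_sparc(h2o)
except ValueError as err:
    print(err)

h2o.center(vacuum=6.0)                        # pad the molecule with vacuum
check_cell_for_sparc(h2o)                     # passes now
```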
(Diffs for the remaining 42 changed files are not shown here.)
