Skip to content

Commit

Permalink
Some fixes to protocol
Browse files · Browse the repository at this point in the history
  • Loading branch information
bastonero committed Dec 22, 2023
1 parent f15de60 commit 7427431
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 16 deletions.
8 changes: 6 additions & 2 deletions src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,13 @@ default_inputs:
tolerance_onsite: 0.1
tolerance_intersite: 0.01
radial_analysis:
radius_max: 10.0 # in Angstrom
thr: 0.1
nn_finder: 'crystal'
nn_inputs:
distance_cutoffs: null # in Angstrom
x_diff_weight: 0
porous_adjustment: False
radius_max: 10.0 # in Angstrom
thr: 0.01 # in Angstrom
scf:
kpoints_distance: 0.4

Expand Down
3 changes: 3 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -494,6 +494,8 @@ def generate_inputs_hubbard(generate_inputs_pw, generate_inputs_hp, generate_hub

def _generate_inputs_hubbard(hubbard_structure=None):
"""Generate default inputs for a `SelfConsistentHubbardWorkChain`."""
from aiida.orm import Bool

hubbard_structure = hubbard_structure or generate_hubbard_structure()
inputs_pw = generate_inputs_pw(structure=hubbard_structure)
inputs_relax = generate_inputs_pw(structure=hubbard_structure)
Expand All @@ -508,6 +510,7 @@ def _generate_inputs_hubbard(hubbard_structure=None):
inputs_hp.pop('parent_scf')

inputs = {
'meta_convergence': Bool(True),
'hubbard_structure': hubbard_structure,
'relax': {
'base': {
Expand Down
4 changes: 4 additions & 0 deletions tests/workflows/protocols/test_hubbard/test_default.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,10 @@ max_iterations: 10
meta_convergence: true
radial_analysis:
nn_finder: crystal
nn_inputs:
distance_cutoffs: null
porous_adjustment: false
x_diff_weight: 0
radius_max: 10.0
thr: 0.01
relax:
Expand Down
23 changes: 9 additions & 14 deletions tests/workflows/test_hubbard.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
# pylint: disable=no-member,redefined-outer-name
"""Tests for the `SelfConsistentHubbardWorkChain` class."""
from aiida.common import AttributeDict
from aiida.orm import Dict
from aiida.orm import Bool, Dict, Int, load_node
from plumpy import ProcessState
import pytest

Expand Down Expand Up @@ -119,8 +119,6 @@ def test_validate_inputs_invalid_inputs(generate_workchain_hubbard, generate_inp
@pytest.mark.usefixtures('aiida_profile')
def test_validate_invalid_positve_input(generate_workchain_hubbard, generate_inputs_hubbard, parameters):
"""Test `SelfConsistentHubbardWorkChain` for invalid positive inputs."""
from aiida.orm import Int

inputs = AttributeDict(generate_inputs_hubbard())
inputs.update({parameters: Int(-1)})

Expand Down Expand Up @@ -178,8 +176,6 @@ def test_magnetic_setup(generate_workchain_hubbard, generate_inputs_hubbard):
@pytest.mark.usefixtures('aiida_profile')
def test_skip_relax_iterations(generate_workchain_hubbard, generate_inputs_hubbard, generate_hp_workchain_node):
"""Test `SelfConsistentHubbardWorkChain` when skipping the first relax iterations."""
from aiida.orm import Bool, Int

inputs = generate_inputs_hubbard()
inputs['skip_relax_iterations'] = Int(1)
inputs['meta_convergence'] = Bool(True)
Expand Down Expand Up @@ -230,8 +226,6 @@ def test_skip_relax_iterations(generate_workchain_hubbard, generate_inputs_hubba
@pytest.mark.usefixtures('aiida_profile')
def test_relax_frequency(generate_workchain_hubbard, generate_inputs_hubbard):
"""Test `SelfConsistentHubbardWorkChain` when `relax_frequency` is different from 1."""
from aiida.orm import Int

inputs = generate_inputs_hubbard()
inputs['relax_frequency'] = Int(3)
process = generate_workchain_hubbard(inputs=inputs)
Expand All @@ -257,8 +251,6 @@ def test_radial_analysis(
We want to make sure `rmax` is in `hp.parameters`.
"""
from aiida.orm import load_node

inputs = generate_inputs_hubbard()
inputs['radial_analysis'] = Dict({}) # no need to specify inputs, it will use the defaults
process = generate_workchain_hubbard(inputs=inputs)
Expand All @@ -275,7 +267,6 @@ def test_radial_analysis(
@pytest.mark.usefixtures('aiida_profile')
def test_should_check_convergence(generate_workchain_hubbard, generate_inputs_hubbard):
"""Test `SelfConsistentHubbardWorkChain.should_check_convergence`."""
from aiida.orm import Bool
inputs = generate_inputs_hubbard()
inputs['meta_convergence'] = Bool(True)
process = generate_workchain_hubbard(inputs=inputs)
Expand All @@ -292,12 +283,12 @@ def test_outline_without_metaconvergence(
We want to make sure the `outputs.hubbard_structure` is the last computed.
"""
from aiida.orm import Bool
inputs = generate_inputs_hubbard()
inputs['meta_convergence'] = Bool(False)
process = generate_workchain_hubbard(inputs=inputs)

process.setup()
process.update_iteration()

process.ctx.workchains_hp = [generate_hp_workchain_node()]
assert process.inspect_hp() is None
Expand All @@ -313,12 +304,12 @@ def test_outline(
generate_workchain_hubbard, generate_inputs_hubbard, generate_scf_workchain_node, generate_hp_workchain_node
):
"""Test `SelfConsistentHubbardWorkChain` outline."""
from aiida.orm import Bool
inputs = generate_inputs_hubbard()
inputs['meta_convergence'] = Bool(True)
process = generate_workchain_hubbard(inputs=inputs)

process.setup()
process.update_iteration()

process.run_relax()
# assert 'workchains_relax' in process.ctx
Expand Down Expand Up @@ -347,6 +338,7 @@ def test_outline(

process.ctx.workchains_hp = [generate_hp_workchain_node()]
assert process.inspect_hp() is None
assert process.should_check_convergence()
process.check_convergence()
assert process.ctx.is_converged

Expand All @@ -358,9 +350,7 @@ def test_outline(
@pytest.mark.usefixtures('aiida_profile')
def test_should_run_relax(generate_workchain_hubbard, generate_inputs_hubbard):
"""Test `SelfConsistentHubbardWorkChain.should_run_relax` method."""
from aiida.orm import Bool
inputs = generate_inputs_hubbard()
inputs['meta_convergence'] = Bool(True)
inputs.pop('relax')
process = generate_workchain_hubbard(inputs=inputs)

Expand All @@ -378,6 +368,7 @@ def test_converged_check_convergence(
process = generate_workchain_hubbard(inputs=inputs)

process.setup()
process.update_iteration()

# Mocking current (i.e. "old") and "new" HubbardStructureData,
# containing different Hubbard parameters
Expand Down Expand Up @@ -433,25 +424,29 @@ def test_relabel_check_convergence(
process = generate_workchain_hubbard(inputs=inputs)

process.setup()
process.update_iteration()

current_hubbard_structure = generate_hubbard_structure(u_value=1, only_u=True)
process.ctx.current_hubbard_structure = current_hubbard_structure
process.ctx.workchains_hp = [generate_hp_workchain_node(relabel=True, u_value=100, only_u=True)]
process.check_convergence()
assert process.should_check_convergence()
assert not process.ctx.is_converged
assert process.ctx.current_hubbard_structure.get_kind_names() != current_hubbard_structure.get_kind_names()

current_hubbard_structure = generate_hubbard_structure(u_value=99.99, only_u=True)
process.ctx.current_hubbard_structure = current_hubbard_structure
process.ctx.workchains_hp = [generate_hp_workchain_node(relabel=True, u_value=100, only_u=True)]
process.check_convergence()
assert process.should_check_convergence()
assert process.ctx.is_converged
assert process.ctx.current_hubbard_structure.get_kind_names() != current_hubbard_structure.get_kind_names()

current_hubbard_structure = generate_hubbard_structure(u_value=99.99)
process.ctx.current_hubbard_structure = current_hubbard_structure
process.ctx.workchains_hp = [generate_hp_workchain_node(relabel=True, u_value=100)]
process.check_convergence()
assert process.should_check_convergence()
assert process.ctx.is_converged
assert process.ctx.current_hubbard_structure.get_kind_names() == current_hubbard_structure.get_kind_names()

Expand Down

0 comments on commit 7427431

Please sign in to comment.