Skip to content

Commit

Permalink
Update combine_imps tests + small bugfixes
Browse files Browse the repository at this point in the history
  • Loading branch information
PhilippRue committed Nov 25, 2024
1 parent 9f37e40 commit 660735c
Show file tree
Hide file tree
Showing 9 changed files with 53 additions and 19 deletions.
4 changes: 0 additions & 4 deletions aiida_kkr/tools/combine_imps.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,6 @@ def get_host_structure(impurity_workflow_or_calc):
extract host structure from impurity
"""
#TODO extract host parent no from input but take into account calculation of host GF from inside kkrimp full workflow
print(
f'This is line in the combine impurity tool files at:: /opt/aiida-kkr/aiida_kkr/tools for deburging the line',
end=' '
)
print(f'impurity_workflow_or_calc: {impurity_workflow_or_calc}')
if impurity_workflow_or_calc.process_class == KkrimpCalculation:
host_parent = impurity_workflow_or_calc.inputs.host_Greenfunction_folder
Expand Down
22 changes: 18 additions & 4 deletions aiida_kkr/workflows/_combine_imps.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
__copyright__ = (u'Copyright (c), 2020, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
__version__ = '0.3.5'
__version__ = '0.3.6'
__contributors__ = (u'Philipp Rüßmann , Rubel Mozumder, David Antognini Silva')

# activate debug writeout
Expand Down Expand Up @@ -289,7 +289,14 @@ def combine_single_single(self):
imp_2 = self.ctx.imp2
# check for the impurity1 whether from single kkr_imp_wc or not
if imp_1.process_class == KkrimpCalculation:
Zimp_num_1 = imp_1.inputs.impurity_info.get_dict().get('Zimp')
try:
impurity_info = imp_1.inputs.impurity_info
except:
host_GF = imp_1.inputs.host_Greenfunction_folder
host_GF_calc = host_GF.base.links.get_incoming(node_class=CalcJobNode).first().node
impurity_info = host_GF_calc.inputs.impurity_info
Zimp_num_1 = impurity_info.get_dict().get('Zimp')

if isinstance(Zimp_num_1, list):
if len(Zimp_num_1) > 1:
single_imp_1 = False
Expand All @@ -304,7 +311,14 @@ def combine_single_single(self):

# check for the impurity2 whether from single kkr_imp_wc or not
if imp_2.process_class == KkrimpCalculation:
Zimp_num_2 = imp_2.inputs.impurity_info.get_dict().get('Zimp')
try:
impurity_info = imp_2.inputs.impurity_info
except:
host_GF = imp_2.inputs.host_Greenfunction_folder
host_GF_calc = host_GF.base.links.get_incoming(node_class=CalcJobNode).first().node
impurity_info = host_GF_calc.inputs.impurity_info
Zimp_num_2 = impurity_info.get_dict().get('Zimp')

if isinstance(Zimp_num_2, list):
if len(Zimp_num_2) > 1:
single_imp_2 = False
Expand Down Expand Up @@ -393,7 +407,7 @@ def extract_imps_info_exact_cluster(self):
imps_info_in_exact_cluster['Zimps'].append(Zimp_2)

imps_info_in_exact_cluster['ilayers'].append(imp2_impurity_info.get_dict()['ilayer_center'])
# TODO: Delete the below print line as it is for deburging
# TODO: Delete the below print line as it is for debugging
self.report(f'DEBUG: This is the imps_info_in_exact_cluster dict: {imps_info_in_exact_cluster}\n')
return 0, imps_info_in_exact_cluster # return also exit code

Expand Down
Binary file added tests/data_dir/combine_imps.aiida
Binary file not shown.
Binary file not shown.
Binary file added tests/data_dir/combine_imps_reuse_gf.aiida
Binary file not shown.
46 changes: 35 additions & 11 deletions tests/workflows/test_combine_imps.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from ..conftest import kkrimp_local_code, kkrhost_local_code, test_dir, data_dir
from aiida.orm import load_node, Dict, load_group
from aiida.engine import run_get_node
from aiida_kkr.workflows import combine_imps_wc
from aiida_kkr.workflows import combine_imps_wc, kkr_scf_wc
from ..conftest import import_with_migration

# activate debug mode?
Expand Down Expand Up @@ -65,15 +65,29 @@ def get_builder_basic(label, kkrhost_local_code, kkrimp_local_code):
'custom_scheduler_commands': ''
}
builder.scf.options = Dict(options)
builder.scf.wf_parameters = Dict({'do_final_cleanup': False}) # this is needed to allow for caching

scf_settings = kkr_scf_wc.get_wf_defaults(silent=True)[0]
scf_settings.kkr_runmax = 1
scf_settings.nsteps = 2
scf_settings.do_final_cleanup = False # this is needed to allow for caching
builder.scf.wf_parameters = Dict(scf_settings)
builder.scf.params_overwrite = Dict({'TOL_ALAT_CHECK': 1e-8})

builder.host_gf.wf_parameters = Dict({'retrieve_kkrflex': True}) # this is needed to allow for caching

builder.host_gf.options = builder.scf.options
builder.wf_parameters_overwrite = Dict({'global': {'allow_unconverged_inputs': True}})

return builder


def test_combine_imps(
clear_database_before_test, kkrhost_local_code, kkrimp_local_code, enable_archive_cache, nopytest=False
clear_database_before_test,
kkrhost_local_code,
kkrimp_local_code,
enable_archive_cache,
ndarrays_regression,
nopytest=False
):
"""
test for combine_imps_wc (place two imps next to each other)
Expand All @@ -95,13 +109,18 @@ def test_combine_imps(
# check outcome
results = out['workflow_info'].get_dict()
print(results)
assert results['successful']
assert results['convergence_reached']
assert 'remote_data_gf' in out # make sure GF writeout step was done
check_dict = {'rms_per_atom': node.outputs.last_calc_output_parameters['convergence_group']['rms_per_atom']}
ndarrays_regression.check(check_dict)


def test_combine_imps_params_kkr_overwrite(
clear_database_before_test, kkrhost_local_code, kkrimp_local_code, enable_archive_cache, nopytest=False
clear_database_before_test,
kkrhost_local_code,
kkrimp_local_code,
enable_archive_cache,
ndarrays_regression,
nopytest=False
):
"""
test for combine_imps_wc overwriting the k-mesh with the params_kkr_overwrite input to the gf writeout step
Expand All @@ -124,13 +143,18 @@ def test_combine_imps_params_kkr_overwrite(
# check outcome
results = out['workflow_info'].get_dict()
print(results)
assert results['successful']
assert results['convergence_reached']
assert 'remote_data_gf' in out # make sure GF writeout step was done
check_dict = {'rms_per_atom': node.outputs.last_calc_output_parameters['convergence_group']['rms_per_atom']}
ndarrays_regression.check(check_dict)


def test_combine_imps_reuse_gf(
clear_database_before_test, kkrhost_local_code, kkrimp_local_code, enable_archive_cache, nopytest=False
clear_database_before_test,
kkrhost_local_code,
kkrimp_local_code,
enable_archive_cache,
ndarrays_regression,
nopytest=False
):
"""
test for combine_imps_wc reusing the host gf from a previous calculation
Expand Down Expand Up @@ -160,9 +184,9 @@ def test_combine_imps_reuse_gf(
# check outcome
results = out['workflow_info'].get_dict()
print(results)
assert results['successful']
assert results['convergence_reached']
assert 'remote_data_gf' not in out # make sure GF writeout step was skipped
check_dict = {'rms_per_atom': node.outputs.last_calc_output_parameters['convergence_group']['rms_per_atom']}
ndarrays_regression.check(check_dict)


# run manual:
Expand Down
Binary file not shown.
Binary file not shown.
Binary file not shown.

0 comments on commit 660735c

Please sign in to comment.