Dependencies: Update requirement aiida-core~=2.1
Sebastiaan Huber authored and sphuber committed Feb 18, 2024
1 parent e3af3f0 commit 99eeaa1
Showing 61 changed files with 230 additions and 203 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/cd.yml
@@ -35,7 +35,7 @@ jobs:
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: '3.8'
python-version: '3.9'
cache: 'pip'
cache-dependency-path: pyproject.toml

@@ -56,7 +56,7 @@ jobs:

strategy:
matrix:
python-version: ['3.8', '3.9']
python-version: ['3.9']

services:
rabbitmq:
@@ -78,7 +78,7 @@
run: pip install -U pip wheel

- name: Install Python package and dependencies
run: pip install -e .[tests] && reentry scan
run: pip install -e .[tests]

- name: Run pytest
run: pytest -sv tests
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
@@ -14,7 +14,7 @@ jobs:
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: '3.8'
python-version: '3.9'
cache: 'pip'
cache-dependency-path: pyproject.toml

@@ -34,7 +34,7 @@

strategy:
matrix:
python-version: ['3.8', '3.9']
python-version: ['3.9']

services:
rabbitmq:
@@ -56,7 +56,7 @@
run: pip install -U pip wheel

- name: Install Python package and dependencies
run: pip install -e .[tests] && reentry scan
run: pip install -e .[tests]

- name: Run pytest
env:
9 changes: 7 additions & 2 deletions aiida_common_workflows/cli/options.py
@@ -105,10 +105,15 @@ def convert(self, value, param, ctx):
f'file `{value}` could not be parsed into a `StructureData`: {exception}'
) from exception

duplicate = QueryBuilder().append(StructureData, filters={'extras._aiida_hash': structure._get_hash()}).first() # pylint: disable=protected-access
duplicate = QueryBuilder().append(
StructureData,
filters={
'extras._aiida_hash': structure.base.caching._get_hash() # pylint: disable=protected-access
}
).first()

if duplicate:
return duplicate[0]
return duplicate[0] # pylint: disable=unsubscriptable-object

return structure
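
The hunk above replaces the removed `StructureData._get_hash()` shortcut with the namespaced `structure.base.caching._get_hash()` API of aiida-core 2.x. A minimal sketch of the same duplicate-lookup pattern, assuming a configured AiiDA profile (the structure below is illustrative only):

from aiida import load_profile, orm

load_profile()

# Build a throwaway structure just to have something to hash.
structure = orm.StructureData(cell=[[4.0, 0.0, 0.0], [0.0, 4.0, 0.0], [0.0, 0.0, 4.0]])
structure.append_atom(position=(0.0, 0.0, 0.0), symbols='Si')

# The content hash now lives under the ``base.caching`` namespace.
node_hash = structure.base.caching._get_hash()  # pylint: disable=protected-access

# Stored nodes carry their hash in the ``_aiida_hash`` extra, so duplicates can be queried.
duplicate = orm.QueryBuilder().append(orm.StructureData, filters={'extras._aiida_hash': node_hash}).first()
result = duplicate[0] if duplicate else structure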

4 changes: 2 additions & 2 deletions aiida_common_workflows/cli/plot.py
@@ -34,7 +34,7 @@ def cmd_plot_eos(workflow, precisions, print_table, output_file):
echo.echo_critical(
f'node {workflow.__class__.__name__}<{workflow.pk}> does not correspond to an EquationOfStateWorkChain.'
)
outputs = workflow.get_outgoing(link_type=LinkType.RETURN).nested()
outputs = workflow.base.links.get_outgoing(link_type=LinkType.RETURN).nested()

missing_outputs = tuple(output for output in ('structures', 'total_energies') if output not in outputs)
if missing_outputs:
@@ -107,7 +107,7 @@ def cmd_plot_dissociation_curve(workflow, precisions, print_table, output_file):
echo.echo_critical(
f'node {workflow.__class__.__name__}<{workflow.pk}> does not correspond to a DissociationCurveWorkChain.'
)
outputs = workflow.get_outgoing(link_type=LinkType.RETURN).nested()
outputs = workflow.base.links.get_outgoing(link_type=LinkType.RETURN).nested()

missing_outputs = tuple(output for output in ('distances', 'total_energies') if output not in outputs)
if missing_outputs:
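
Both hunks above switch from the removed `node.get_outgoing(...)` shortcut to the `base.links` namespace of aiida-core 2.x. A hedged sketch of reading the nested RETURN outputs of a stored workflow (the PK is a placeholder):

from aiida import load_profile, orm
from aiida.common.links import LinkType

load_profile()

workflow = orm.load_node(1234)  # placeholder PK of a finished EquationOfStateWorkChain
outputs = workflow.base.links.get_outgoing(link_type=LinkType.RETURN).nested()

# Same sanity check as performed in ``cmd_plot_eos`` above.
missing = tuple(output for output in ('structures', 'total_energies') if output not in outputs)
if missing:
    print(f'missing outputs: {missing}')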
7 changes: 4 additions & 3 deletions aiida_common_workflows/cli/root.py
@@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
"""Command line interface ``acwf``."""
from aiida.cmdline.groups import VerdiCommandGroup
from aiida.cmdline.params import options, types
import click


@click.group('acwf', context_settings={'help_option_names': ['-h', '--help']})
@options.PROFILE(type=types.ProfileParamType(load_profile=True))
def cmd_root(profile): # pylint: disable=unused-argument
@click.group('acwf', cls=VerdiCommandGroup, context_settings={'help_option_names': ['-h', '--help']})
@options.PROFILE(type=types.ProfileParamType(load_profile=True), expose_value=False)
def cmd_root():
"""CLI for the ``aiida-common-workflows`` plugin."""
8 changes: 4 additions & 4 deletions aiida_common_workflows/cli/utils.py
@@ -16,7 +16,7 @@ def echo_process_results(node):
from aiida.common.links import LinkType

class_name = node.process_class.__name__
outputs = node.get_outgoing(link_type=(LinkType.CREATE, LinkType.RETURN)).all()
outputs = node.base.links.get_outgoing(link_type=(LinkType.CREATE, LinkType.RETURN)).all()

if node.is_finished and node.exit_message:
state = f'{node.process_state.value} [{node.exit_status}] `{node.exit_message}`'
@@ -78,13 +78,13 @@ def get_code_from_list_or_database(codes, entry_point: str):
:param entry_point: calculation job entry point name.
:return: a ``Code`` instance configured for the given entry point or ``None``.
"""
from aiida.orm import Code, QueryBuilder
from aiida.orm import InstalledCode, QueryBuilder

for entry in codes:
if entry.get_attribute('input_plugin') == entry_point:
if entry.default_calc_job_plugin == entry_point:
return entry

result = QueryBuilder().append(Code, filters={'attributes.input_plugin': entry_point}).first()
result = QueryBuilder().append(InstalledCode, filters={'attributes.input_plugin': entry_point}).first()

if result is not None:
return result[0]
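
The helper now inspects `default_calc_job_plugin`, the aiida-core 2.x replacement for reading the old `input_plugin` attribute, and queries the new `InstalledCode` class instead of the legacy `Code`. A hedged sketch of the lookup, with the entry-point name as a placeholder:

from aiida import load_profile, orm

load_profile()

entry_point = 'quantumespresso.pw'  # placeholder calculation job entry point

result = orm.QueryBuilder().append(orm.InstalledCode, filters={'attributes.input_plugin': entry_point}).first()
code = result[0] if result is not None else None

if code is not None:
    print(code.label, code.default_calc_job_plugin)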
12 changes: 7 additions & 5 deletions aiida_common_workflows/generators/ports.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
"""Modules with resources to define specific port types for input generator specifications."""
from __future__ import annotations

import typing as t

from aiida.engine import InputPort
@@ -30,7 +32,7 @@ def __init__(self, choices: t.Sequence[t.Any]):
"""
valid_types = tuple({type(choice) for choice in choices})
self.choices: t.Sequence[t.Any] = choices
self.valid_type: t.Tuple[t.Any] = valid_types if len(valid_types) > 1 else valid_types[0]
self.valid_type: tuple[t.Any] = valid_types if len(valid_types) > 1 else valid_types[0]


class InputGeneratorPort(InputPort):
@@ -45,7 +47,7 @@ def __init__(self, *args, valid_type=None, **kwargs) -> None:
self.valid_type = valid_type

@Port.valid_type.setter
def valid_type(self, valid_type: t.Optional[t.Any]) -> None:
def valid_type(self, valid_type: t.Any | None) -> None:
"""Set the valid value type for this port.
:param valid_type: the value valid type.
@@ -60,15 +62,15 @@ def valid_type(self, valid_type: t.Optional[t.Any]) -> None:

self._valid_type = valid_type

def validate(self, value: t.Any, breadcrumbs: t.Sequence[str] = ()) -> t.Optional[PortValidationError]:
def validate(self, value: t.Any, breadcrumbs: t.Sequence[str] = ()) -> PortValidationError | None:
"""Validate the value by calling the super followed by checking it against the choices if defined."""
result = super().validate(value, breadcrumbs)

if result is not None:
return result

if self.code_entry_point is not None and value.get_input_plugin_name() != self.code_entry_point:
return f'invalid entry point `{value.get_input_plugin_name()}` for `Code{value}`.'
if self.code_entry_point is not None and value.default_calc_job_plugin != self.code_entry_point:
return f'invalid entry point `{value.default_calc_job_plugin}` for `Code{value}`.'

if value is not UNSPECIFIED and self.choices is not None and value not in self.choices:
choices = [str(value) for value in self.choices]
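
Adding `from __future__ import annotations` makes all annotations lazily evaluated, so the module can use PEP 604 unions (`X | None`) and builtin generics (`tuple[...]`) even on the Python 3.9 interpreters now targeted by the CI. A small hedged illustration:

from __future__ import annotations

import typing as t


def first_or_none(values: t.Sequence[int], index: int | None = None) -> tuple[int, ...] | None:
    """Hypothetical helper showing the annotation style adopted above."""
    if not values:
        return None
    if index is None:
        return tuple(values)
    return (values[index],)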
5 changes: 3 additions & 2 deletions aiida_common_workflows/generators/spec.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
"""Class to define the specification of an input generator."""
from __future__ import annotations

import typing as t

from aiida.engine import PortNamespace
@@ -24,8 +26,7 @@ def namespace_separator(self) -> str:
return self.PORT_NAMESPACE_TYPE.NAMESPACE_SEPARATOR

def _create_port(
self, port_namespace: PortNamespace, port_class: t.Union[InputGeneratorPort, PortNamespace], name: str,
**kwargs: t.Any
self, port_namespace: PortNamespace, port_class: InputGeneratorPort | PortNamespace, name: str, **kwargs: t.Any
) -> None:
"""Create a new port of a given class and name in a given namespace.
2 changes: 1 addition & 1 deletion aiida_common_workflows/workflows/bands/generator.py
@@ -25,7 +25,7 @@ def define(cls, spec):
super().define(spec)
spec.input(
'bands_kpoints',
valid_type=plugins.DataFactory('array.kpoints'),
valid_type=plugins.DataFactory('core.array.kpoints'),
required=True,
help='The full list of kpoints where to calculate bands, in (direct) coordinates of the reciprocal space.'
)
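
All entry points shipped with aiida-core gained a `core.` prefix in version 2.x, which is why `array.kpoints` becomes `core.array.kpoints` here (and `structure` becomes `core.structure` elsewhere in this commit). A short sketch, assuming a configured profile:

from aiida import load_profile, plugins

load_profile()

KpointsData = plugins.DataFactory('core.array.kpoints')

kpoints = KpointsData()
kpoints.set_kpoints_mesh([4, 4, 4])  # e.g. a 4x4x4 Monkhorst-Pack mesh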
2 changes: 1 addition & 1 deletion aiida_common_workflows/workflows/bands/siesta/__init__.py
@@ -4,4 +4,4 @@
from .generator import *
from .workchain import *

__all__ = (generator.__all__ + workchain.__all__)
__all__ = generator.__all__ + workchain.__all__
4 changes: 2 additions & 2 deletions aiida_common_workflows/workflows/dissociation.py
@@ -97,8 +97,8 @@ def set_distance(molecule: orm.StructureData, distance: orm.Float) -> orm.Struct
versor_diff = vector_diff / np.linalg.norm(vector_diff)
new_molecule = molecule.clone()
new_position = (distance.value * versor_diff) / 2
new_molecule.attributes['sites'][0]['position'] = -new_position
new_molecule.attributes['sites'][1]['position'] = new_position
new_molecule.base.attributes.get('sites')[0]['position'] = -new_position
new_molecule.base.attributes.get('sites')[1]['position'] = new_position
return new_molecule
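
Direct dictionary access via `node.attributes` is gone in aiida-core 2.x; attributes are read through the `base.attributes` namespace instead, and for an unstored clone the returned `sites` list can still be modified in place, as `set_distance` does above. A hedged sketch of the same pattern for a two-atom molecule:

from aiida import load_profile, orm

load_profile()

molecule = orm.StructureData(cell=[[10.0, 0.0, 0.0], [0.0, 10.0, 0.0], [0.0, 0.0, 10.0]])
molecule.append_atom(position=(0.0, 0.0, 0.0), symbols='H')
molecule.append_atom(position=(0.0, 0.0, 0.74), symbols='H')

clone = molecule.clone()
# The clone is unstored, so the attribute returned here is mutable and the update sticks.
clone.base.attributes.get('sites')[1]['position'] = (0.0, 0.0, 1.0)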


@@ -24,5 +24,5 @@ def get_ts_energy(common_relax_workchain: AbinitCommonRelaxWorkChain) -> float:
if common_relax_workchain.process_class != AbinitCommonRelaxWorkChain:
return ValueError('The input workchain is not a `AbinitCommonRelaxWorkChain`')

abinit_base_wc = common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node
abinit_base_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node
return -abinit_base_wc.outputs.output_parameters['e_entropy']
6 changes: 3 additions & 3 deletions aiida_common_workflows/workflows/relax/abinit/generator.py
@@ -19,7 +19,7 @@

__all__ = ('AbinitCommonRelaxInputGenerator',)

StructureData = plugins.DataFactory('structure')
StructureData = plugins.DataFactory('core.structure')


class AbinitCommonRelaxInputGenerator(CommonRelaxInputGenerator):
@@ -34,7 +34,7 @@ def __init__(self, *args, **kwargs):

def _initialize_protocols(self):
"""Initialize the protocols class attribute by parsing them from the configuration file."""
with open(str(pathlib.Path(__file__).parent / 'protocol.yml')) as handle:
with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='utf-8') as handle:
self._protocols = yaml.safe_load(handle)

@classmethod
Expand Down Expand Up @@ -76,7 +76,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder:

pseudo_family_label = protocol.pop('pseudo_family')
try:
pseudo_family = orm.Group.objects.get(label=pseudo_family_label)
pseudo_family = orm.Group.collection.get(label=pseudo_family_label)
except exceptions.NotExistent as exception:
raise ValueError(
f'required pseudo family `{pseudo_family_label}` is not installed. '
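
`Group.objects` was renamed to `Group.collection` in aiida-core 2.x, which is what the protocol resolution above adopts. A hedged sketch of the same lookup, with the family label as a placeholder:

from aiida import load_profile, orm
from aiida.common import exceptions

load_profile()

label = 'SSSP/1.1/PBE/efficiency'  # placeholder pseudo-family label

try:
    pseudo_family = orm.Group.collection.get(label=label)
except exceptions.NotExistent:
    print(f'pseudo family `{label}` is not installed')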
6 changes: 3 additions & 3 deletions aiida_common_workflows/workflows/relax/abinit/workchain.py
Original file line number Diff line number Diff line change
@@ -18,22 +18,22 @@
def get_stress(parameters):
"""Return the stress array from the given parameters node."""
stress = orm.ArrayData()
stress.set_array(name='stress', array=np.array(parameters.get_attribute('cart_stress_tensor')) * GPA_TO_EV_A3)
stress.set_array(name='stress', array=np.array(parameters.base.attributes.get('cart_stress_tensor')) * GPA_TO_EV_A3)
return stress


@calcfunction
def get_forces(parameters):
"""Return the forces array from the given parameters node."""
forces = orm.ArrayData()
forces.set_array(name='forces', array=np.array(parameters.get_attribute('forces')))
forces.set_array(name='forces', array=np.array(parameters.base.attributes.get('forces')))
return forces


@calcfunction
def get_total_energy(parameters):
"""Return the total energy from the given parameters node."""
return orm.Float(parameters.get_attribute('energy'))
return orm.Float(parameters.base.attributes.get('energy'))


@calcfunction
2 changes: 1 addition & 1 deletion aiida_common_workflows/workflows/relax/bigdft/__init__.py
@@ -4,4 +4,4 @@
from .generator import *
from .workchain import *

__all__ = (generator.__all__ + workchain.__all__)
__all__ = generator.__all__ + workchain.__all__
2 changes: 1 addition & 1 deletion aiida_common_workflows/workflows/relax/bigdft/generator.py
@@ -10,7 +10,7 @@
__all__ = ('BigDftCommonRelaxInputGenerator',)

BigDFTParameters = plugins.DataFactory('bigdft')
StructureData = plugins.DataFactory('structure')
StructureData = plugins.DataFactory('core.structure')


class BigDftCommonRelaxInputGenerator(CommonRelaxInputGenerator):
2 changes: 1 addition & 1 deletion aiida_common_workflows/workflows/relax/castep/__init__.py
Original file line number Diff line number Diff line change
@@ -4,4 +4,4 @@
from .generator import *
from .workchain import *

__all__ = (generator.__all__ + workchain.__all__)
__all__ = generator.__all__ + workchain.__all__
@@ -28,7 +28,7 @@ def get_ts_energy(common_relax_workchain):
if common_relax_workchain.process_class != CastepCommonRelaxWorkChain:
return ValueError('The input workchain is not a `CastepCommonRelaxWorkChain`')

castep_base_wc = common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node
castep_base_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node
e_ks = castep_base_wc.outputs.output_parameters['total energy']
free_e = castep_base_wc.outputs.output_parameters['free energy']

20 changes: 9 additions & 11 deletions aiida_common_workflows/workflows/relax/castep/generator.py
@@ -22,7 +22,7 @@

__all__ = ('CastepCommonRelaxInputGenerator',)

StructureData = plugins.DataFactory('structure') # pylint: disable=invalid-name
StructureData = plugins.DataFactory('core.structure') # pylint: disable=invalid-name


class CastepCommonRelaxInputGenerator(CommonRelaxInputGenerator):
@@ -37,7 +37,7 @@ def __init__(self, *args, **kwargs):

def _initialize_protocols(self):
"""Initialize the protocols class attribute by parsing them from the configuration file."""
with open(str(pathlib.Path(__file__).parent / 'protocol.yml')) as handle:
with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='utf-8') as handle:
self._protocols = yaml.safe_load(handle)

@classmethod
@@ -170,7 +170,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder:
# this is because the small basis set will give rise to errors in EOS / variable volume
# relaxation even with the "fine" option
if 'cut_off_energy' not in protocol['relax']['base']['calc']['parameters']:
with open(str(pathlib.Path(__file__).parent / 'soft_elements.yml')) as fhandle:
with open(str(pathlib.Path(__file__).parent / 'soft_elements.yml'), encoding='utf-8') as fhandle:
soft_elements = yaml.safe_load(fhandle)
symbols = [kind.symbol for kind in structure.kinds]
if all(sym in soft_elements for sym in symbols):
@@ -256,13 +256,11 @@ def generate_inputs(
if isinstance(family_name, orm.Str):
family_name = family_name.value
try:
otfg_family = OTFGGroup.objects.get(label=family_name)
otfg_family = OTFGGroup.collection.get(label=family_name)
except exceptions.NotExistent as exc:
raise ValueError(
'protocol `{}` requires the `{}` `pseudos family` but could not be found.'.format(
protocol['name'], protocol['relax']['base']['pseudos_family']
)
) from exc
name = protocol['name']
family = protocol['relax']['base']['pseudos_family']
raise ValueError(f'protocol `{name}` requires the `{family}` `pseudos family` but could not be found.') from exc

CastepCalculation = plugins.CalculationFactory('castep.castep') # pylint: disable=invalid-name
CastepBaseWorkChain = plugins.WorkflowFactory('castep.base') # pylint: disable=invalid-name
@@ -426,7 +424,7 @@ def ensure_otfg_family(family_name, force_update=False):
if isinstance(family_name, orm.Str):
family_name = family_name.value
try:
OTFGGroup.objects.get(label=family_name)
OTFGGroup.collection.get(label=family_name)
except NotExistent:
has_family = False
else:
@@ -441,7 +439,7 @@

# Not an known family - check if it in the additional settings list
# Load configuration from the settings
with open(str(pathlib.Path(__file__).parent / 'additional_otfg_families.yml')) as handle:
with open(str(pathlib.Path(__file__).parent / 'additional_otfg_families.yml'), encoding='utf-8') as handle:
additional = yaml.safe_load(handle)

if family_name in additional:
4 changes: 2 additions & 2 deletions aiida_common_workflows/workflows/relax/castep/workchain.py
@@ -53,7 +53,7 @@ def get_free_energy(parameters):
Return the free energy from the given parameters node.
The free energy reported by CASTEP is the one that is consistent with the forces.
"""
return orm.Float(parameters.get_attribute('free_energy'))
return orm.Float(parameters.base.attributes.get('free_energy'))


@calcfunction
@@ -62,7 +62,7 @@ def get_total_magnetization(parameters):
Return the free energy from the given parameters node.
The free energy reported by CASTEP is the one that is consistent with the forces.
"""
return orm.Float(parameters.get_attribute('spin_density'))
return orm.Float(parameters.base.attributes.get('spin_density'))


class CastepCommonRelaxWorkChain(CommonRelaxWorkChain):
2 changes: 1 addition & 1 deletion aiida_common_workflows/workflows/relax/cp2k/__init__.py
@@ -4,4 +4,4 @@
from .generator import *
from .workchain import *

__all__ = (generator.__all__ + workchain.__all__)
__all__ = generator.__all__ + workchain.__all__