diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index 43986e01..bede452d 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -35,7 +35,7 @@ jobs: - name: Install Python uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.9' cache: 'pip' cache-dependency-path: pyproject.toml @@ -56,7 +56,7 @@ jobs: strategy: matrix: - python-version: ['3.8', '3.9'] + python-version: ['3.9'] services: rabbitmq: @@ -78,7 +78,7 @@ jobs: run: pip install -U pip wheel - name: Install Python package and dependencies - run: pip install -e .[tests] && reentry scan + run: pip install -e .[tests] - name: Run pytest run: pytest -sv tests diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3f423a51..5593bfbd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,7 +14,7 @@ jobs: - name: Install Python uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.9' cache: 'pip' cache-dependency-path: pyproject.toml @@ -34,7 +34,7 @@ jobs: strategy: matrix: - python-version: ['3.8', '3.9'] + python-version: ['3.9'] services: rabbitmq: @@ -56,7 +56,7 @@ jobs: run: pip install -U pip wheel - name: Install Python package and dependencies - run: pip install -e .[tests] && reentry scan + run: pip install -e .[tests] - name: Run pytest env: diff --git a/aiida_common_workflows/cli/options.py b/aiida_common_workflows/cli/options.py index 92734749..43619f6e 100644 --- a/aiida_common_workflows/cli/options.py +++ b/aiida_common_workflows/cli/options.py @@ -105,10 +105,15 @@ def convert(self, value, param, ctx): f'file `{value}` could not be parsed into a `StructureData`: {exception}' ) from exception - duplicate = QueryBuilder().append(StructureData, filters={'extras._aiida_hash': structure._get_hash()}).first() # pylint: disable=protected-access + duplicate = QueryBuilder().append( + StructureData, + filters={ + 'extras._aiida_hash': structure.base.caching._get_hash() # pylint: disable=protected-access + } + ).first() if duplicate: - return duplicate[0] + return duplicate[0] # pylint: disable=unsubscriptable-object return structure diff --git a/aiida_common_workflows/cli/plot.py b/aiida_common_workflows/cli/plot.py index c1778108..9133f8e8 100644 --- a/aiida_common_workflows/cli/plot.py +++ b/aiida_common_workflows/cli/plot.py @@ -34,7 +34,7 @@ def cmd_plot_eos(workflow, precisions, print_table, output_file): echo.echo_critical( f'node {workflow.__class__.__name__}<{workflow.pk}> does not correspond to an EquationOfStateWorkChain.' ) - outputs = workflow.get_outgoing(link_type=LinkType.RETURN).nested() + outputs = workflow.base.links.get_outgoing(link_type=LinkType.RETURN).nested() missing_outputs = tuple(output for output in ('structures', 'total_energies') if output not in outputs) if missing_outputs: @@ -107,7 +107,7 @@ def cmd_plot_dissociation_curve(workflow, precisions, print_table, output_file): echo.echo_critical( f'node {workflow.__class__.__name__}<{workflow.pk}> does not correspond to a DissociationCurveWorkChain.' 
) - outputs = workflow.get_outgoing(link_type=LinkType.RETURN).nested() + outputs = workflow.base.links.get_outgoing(link_type=LinkType.RETURN).nested() missing_outputs = tuple(output for output in ('distances', 'total_energies') if output not in outputs) if missing_outputs: diff --git a/aiida_common_workflows/cli/root.py b/aiida_common_workflows/cli/root.py index 82c7153e..81953871 100644 --- a/aiida_common_workflows/cli/root.py +++ b/aiida_common_workflows/cli/root.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- """Command line interface ``acwf``.""" +from aiida.cmdline.groups import VerdiCommandGroup from aiida.cmdline.params import options, types import click -@click.group('acwf', context_settings={'help_option_names': ['-h', '--help']}) -@options.PROFILE(type=types.ProfileParamType(load_profile=True)) -def cmd_root(profile): # pylint: disable=unused-argument +@click.group('acwf', cls=VerdiCommandGroup, context_settings={'help_option_names': ['-h', '--help']}) +@options.PROFILE(type=types.ProfileParamType(load_profile=True), expose_value=False) +def cmd_root(): """CLI for the ``aiida-common-workflows`` plugin.""" diff --git a/aiida_common_workflows/cli/utils.py b/aiida_common_workflows/cli/utils.py index 27527621..7abf4455 100644 --- a/aiida_common_workflows/cli/utils.py +++ b/aiida_common_workflows/cli/utils.py @@ -16,7 +16,7 @@ def echo_process_results(node): from aiida.common.links import LinkType class_name = node.process_class.__name__ - outputs = node.get_outgoing(link_type=(LinkType.CREATE, LinkType.RETURN)).all() + outputs = node.base.links.get_outgoing(link_type=(LinkType.CREATE, LinkType.RETURN)).all() if node.is_finished and node.exit_message: state = f'{node.process_state.value} [{node.exit_status}] `{node.exit_message}`' @@ -78,13 +78,13 @@ def get_code_from_list_or_database(codes, entry_point: str): :param entry_point: calculation job entry point name. :return: a ``Code`` instance configured for the given entry point or ``None``. 
""" - from aiida.orm import Code, QueryBuilder + from aiida.orm import InstalledCode, QueryBuilder for entry in codes: - if entry.get_attribute('input_plugin') == entry_point: + if entry.default_calc_job_plugin == entry_point: return entry - result = QueryBuilder().append(Code, filters={'attributes.input_plugin': entry_point}).first() + result = QueryBuilder().append(InstalledCode, filters={'attributes.input_plugin': entry_point}).first() if result is not None: return result[0] diff --git a/aiida_common_workflows/generators/ports.py b/aiida_common_workflows/generators/ports.py index 1863f29d..068b964d 100644 --- a/aiida_common_workflows/generators/ports.py +++ b/aiida_common_workflows/generators/ports.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- """Modules with resources to define specific port types for input generator specifications.""" +from __future__ import annotations + import typing as t from aiida.engine import InputPort @@ -30,7 +32,7 @@ def __init__(self, choices: t.Sequence[t.Any]): """ valid_types = tuple({type(choice) for choice in choices}) self.choices: t.Sequence[t.Any] = choices - self.valid_type: t.Tuple[t.Any] = valid_types if len(valid_types) > 1 else valid_types[0] + self.valid_type: tuple[t.Any] = valid_types if len(valid_types) > 1 else valid_types[0] class InputGeneratorPort(InputPort): @@ -45,7 +47,7 @@ def __init__(self, *args, valid_type=None, **kwargs) -> None: self.valid_type = valid_type @Port.valid_type.setter - def valid_type(self, valid_type: t.Optional[t.Any]) -> None: + def valid_type(self, valid_type: t.Any | None) -> None: """Set the valid value type for this port. :param valid_type: the value valid type. @@ -60,15 +62,15 @@ def valid_type(self, valid_type: t.Optional[t.Any]) -> None: self._valid_type = valid_type - def validate(self, value: t.Any, breadcrumbs: t.Sequence[str] = ()) -> t.Optional[PortValidationError]: + def validate(self, value: t.Any, breadcrumbs: t.Sequence[str] = ()) -> PortValidationError | None: """Validate the value by calling the super followed by checking it against the choices if defined.""" result = super().validate(value, breadcrumbs) if result is not None: return result - if self.code_entry_point is not None and value.get_input_plugin_name() != self.code_entry_point: - return f'invalid entry point `{value.get_input_plugin_name()}` for `Code{value}`.' + if self.code_entry_point is not None and value.default_calc_job_plugin != self.code_entry_point: + return f'invalid entry point `{value.default_calc_job_plugin}` for `Code{value}`.' if value is not UNSPECIFIED and self.choices is not None and value not in self.choices: choices = [str(value) for value in self.choices] diff --git a/aiida_common_workflows/generators/spec.py b/aiida_common_workflows/generators/spec.py index 35bbb6e1..eb721860 100644 --- a/aiida_common_workflows/generators/spec.py +++ b/aiida_common_workflows/generators/spec.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- """Class to define the specification of an input generator.""" +from __future__ import annotations + import typing as t from aiida.engine import PortNamespace @@ -24,8 +26,7 @@ def namespace_separator(self) -> str: return self.PORT_NAMESPACE_TYPE.NAMESPACE_SEPARATOR def _create_port( - self, port_namespace: PortNamespace, port_class: t.Union[InputGeneratorPort, PortNamespace], name: str, - **kwargs: t.Any + self, port_namespace: PortNamespace, port_class: InputGeneratorPort | PortNamespace, name: str, **kwargs: t.Any ) -> None: """Create a new port of a given class and name in a given namespace. 
diff --git a/aiida_common_workflows/workflows/bands/generator.py b/aiida_common_workflows/workflows/bands/generator.py index 94a17463..2995fcb9 100644 --- a/aiida_common_workflows/workflows/bands/generator.py +++ b/aiida_common_workflows/workflows/bands/generator.py @@ -25,7 +25,7 @@ def define(cls, spec): super().define(spec) spec.input( 'bands_kpoints', - valid_type=plugins.DataFactory('array.kpoints'), + valid_type=plugins.DataFactory('core.array.kpoints'), required=True, help='The full list of kpoints where to calculate bands, in (direct) coordinates of the reciprocal space.' ) diff --git a/aiida_common_workflows/workflows/bands/siesta/__init__.py b/aiida_common_workflows/workflows/bands/siesta/__init__.py index c725bf86..b31fb266 100644 --- a/aiida_common_workflows/workflows/bands/siesta/__init__.py +++ b/aiida_common_workflows/workflows/bands/siesta/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/dissociation.py b/aiida_common_workflows/workflows/dissociation.py index 70e7a663..18e30322 100644 --- a/aiida_common_workflows/workflows/dissociation.py +++ b/aiida_common_workflows/workflows/dissociation.py @@ -97,8 +97,8 @@ def set_distance(molecule: orm.StructureData, distance: orm.Float) -> orm.Struct versor_diff = vector_diff / np.linalg.norm(vector_diff) new_molecule = molecule.clone() new_position = (distance.value * versor_diff) / 2 - new_molecule.attributes['sites'][0]['position'] = -new_position - new_molecule.attributes['sites'][1]['position'] = new_position + new_molecule.base.attributes.get('sites')[0]['position'] = -new_position + new_molecule.base.attributes.get('sites')[1]['position'] = new_position return new_molecule diff --git a/aiida_common_workflows/workflows/relax/abinit/extractors.py b/aiida_common_workflows/workflows/relax/abinit/extractors.py index 9480ad6b..2b7c5126 100644 --- a/aiida_common_workflows/workflows/relax/abinit/extractors.py +++ b/aiida_common_workflows/workflows/relax/abinit/extractors.py @@ -24,5 +24,5 @@ def get_ts_energy(common_relax_workchain: AbinitCommonRelaxWorkChain) -> float: if common_relax_workchain.process_class != AbinitCommonRelaxWorkChain: return ValueError('The input workchain is not a `AbinitCommonRelaxWorkChain`') - abinit_base_wc = common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node + abinit_base_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node return -abinit_base_wc.outputs.output_parameters['e_entropy'] diff --git a/aiida_common_workflows/workflows/relax/abinit/generator.py b/aiida_common_workflows/workflows/relax/abinit/generator.py index 7e215b7c..0f145697 100644 --- a/aiida_common_workflows/workflows/relax/abinit/generator.py +++ b/aiida_common_workflows/workflows/relax/abinit/generator.py @@ -19,7 +19,7 @@ __all__ = ('AbinitCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') class AbinitCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -34,7 +34,7 @@ def __init__(self, *args, **kwargs): def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the configuration file.""" - with open(str(pathlib.Path(__file__).parent / 'protocol.yml')) as handle: + with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='utf-8') as handle: self._protocols = 
yaml.safe_load(handle) @classmethod @@ -76,7 +76,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: pseudo_family_label = protocol.pop('pseudo_family') try: - pseudo_family = orm.Group.objects.get(label=pseudo_family_label) + pseudo_family = orm.Group.collection.get(label=pseudo_family_label) except exceptions.NotExistent as exception: raise ValueError( f'required pseudo family `{pseudo_family_label}` is not installed. ' diff --git a/aiida_common_workflows/workflows/relax/abinit/workchain.py b/aiida_common_workflows/workflows/relax/abinit/workchain.py index 9f491b18..48a3984c 100644 --- a/aiida_common_workflows/workflows/relax/abinit/workchain.py +++ b/aiida_common_workflows/workflows/relax/abinit/workchain.py @@ -18,7 +18,7 @@ def get_stress(parameters): """Return the stress array from the given parameters node.""" stress = orm.ArrayData() - stress.set_array(name='stress', array=np.array(parameters.get_attribute('cart_stress_tensor')) * GPA_TO_EV_A3) + stress.set_array(name='stress', array=np.array(parameters.base.attributes.get('cart_stress_tensor')) * GPA_TO_EV_A3) return stress @@ -26,14 +26,14 @@ def get_stress(parameters): def get_forces(parameters): """Return the forces array from the given parameters node.""" forces = orm.ArrayData() - forces.set_array(name='forces', array=np.array(parameters.get_attribute('forces'))) + forces.set_array(name='forces', array=np.array(parameters.base.attributes.get('forces'))) return forces @calcfunction def get_total_energy(parameters): """Return the total energy from the given parameters node.""" - return orm.Float(parameters.get_attribute('energy')) + return orm.Float(parameters.base.attributes.get('energy')) @calcfunction diff --git a/aiida_common_workflows/workflows/relax/bigdft/__init__.py b/aiida_common_workflows/workflows/relax/bigdft/__init__.py index cf7fea01..5b6bef36 100644 --- a/aiida_common_workflows/workflows/relax/bigdft/__init__.py +++ b/aiida_common_workflows/workflows/relax/bigdft/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/bigdft/generator.py b/aiida_common_workflows/workflows/relax/bigdft/generator.py index a4e2e73e..14536131 100644 --- a/aiida_common_workflows/workflows/relax/bigdft/generator.py +++ b/aiida_common_workflows/workflows/relax/bigdft/generator.py @@ -10,7 +10,7 @@ __all__ = ('BigDftCommonRelaxInputGenerator',) BigDFTParameters = plugins.DataFactory('bigdft') -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') class BigDftCommonRelaxInputGenerator(CommonRelaxInputGenerator): diff --git a/aiida_common_workflows/workflows/relax/castep/__init__.py b/aiida_common_workflows/workflows/relax/castep/__init__.py index f5ab5c5a..93b2baa4 100644 --- a/aiida_common_workflows/workflows/relax/castep/__init__.py +++ b/aiida_common_workflows/workflows/relax/castep/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/castep/extractors.py b/aiida_common_workflows/workflows/relax/castep/extractors.py index 211b1f05..23e3258d 100644 --- a/aiida_common_workflows/workflows/relax/castep/extractors.py +++ b/aiida_common_workflows/workflows/relax/castep/extractors.py @@ -28,7 +28,7 @@ def 
get_ts_energy(common_relax_workchain): if common_relax_workchain.process_class != CastepCommonRelaxWorkChain: return ValueError('The input workchain is not a `CastepCommonRelaxWorkChain`') - castep_base_wc = common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node + castep_base_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node e_ks = castep_base_wc.outputs.output_parameters['total energy'] free_e = castep_base_wc.outputs.output_parameters['free energy'] diff --git a/aiida_common_workflows/workflows/relax/castep/generator.py b/aiida_common_workflows/workflows/relax/castep/generator.py index dba98eb0..07fa63ee 100644 --- a/aiida_common_workflows/workflows/relax/castep/generator.py +++ b/aiida_common_workflows/workflows/relax/castep/generator.py @@ -22,7 +22,7 @@ __all__ = ('CastepCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') # pylint: disable=invalid-name +StructureData = plugins.DataFactory('core.structure') # pylint: disable=invalid-name class CastepCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -37,7 +37,7 @@ def __init__(self, *args, **kwargs): def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the configuration file.""" - with open(str(pathlib.Path(__file__).parent / 'protocol.yml')) as handle: + with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='utf-8') as handle: self._protocols = yaml.safe_load(handle) @classmethod @@ -170,7 +170,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: # this is because the small basis set will give rise to errors in EOS / variable volume # relaxation even with the "fine" option if 'cut_off_energy' not in protocol['relax']['base']['calc']['parameters']: - with open(str(pathlib.Path(__file__).parent / 'soft_elements.yml')) as fhandle: + with open(str(pathlib.Path(__file__).parent / 'soft_elements.yml'), encoding='utf-8') as fhandle: soft_elements = yaml.safe_load(fhandle) symbols = [kind.symbol for kind in structure.kinds] if all(sym in soft_elements for sym in symbols): @@ -256,13 +256,11 @@ def generate_inputs( if isinstance(family_name, orm.Str): family_name = family_name.value try: - otfg_family = OTFGGroup.objects.get(label=family_name) + otfg_family = OTFGGroup.collection.get(label=family_name) except exceptions.NotExistent as exc: - raise ValueError( - 'protocol `{}` requires the `{}` `pseudos family` but could not be found.'.format( - protocol['name'], protocol['relax']['base']['pseudos_family'] - ) - ) from exc + name = protocol['name'] + family = protocol['relax']['base']['pseudos_family'] + raise ValueError(f'protocol `{name}` requires the `{family}` `pseudos family` but could not be found.') from exc CastepCalculation = plugins.CalculationFactory('castep.castep') # pylint: disable=invalid-name CastepBaseWorkChain = plugins.WorkflowFactory('castep.base') # pylint: disable=invalid-name @@ -426,7 +424,7 @@ def ensure_otfg_family(family_name, force_update=False): if isinstance(family_name, orm.Str): family_name = family_name.value try: - OTFGGroup.objects.get(label=family_name) + OTFGGroup.collection.get(label=family_name) except NotExistent: has_family = False else: @@ -441,7 +439,7 @@ def ensure_otfg_family(family_name, force_update=False): # Not an known family - check if it in the additional settings list # Load configuration from the settings - with open(str(pathlib.Path(__file__).parent / 'additional_otfg_families.yml')) as handle: + with 
open(str(pathlib.Path(__file__).parent / 'additional_otfg_families.yml'), encoding='utf-8') as handle: additional = yaml.safe_load(handle) if family_name in additional: diff --git a/aiida_common_workflows/workflows/relax/castep/workchain.py b/aiida_common_workflows/workflows/relax/castep/workchain.py index 07e535f9..118d2d60 100644 --- a/aiida_common_workflows/workflows/relax/castep/workchain.py +++ b/aiida_common_workflows/workflows/relax/castep/workchain.py @@ -53,7 +53,7 @@ def get_free_energy(parameters): Return the free energy from the given parameters node. The free energy reported by CASTEP is the one that is consistent with the forces. """ - return orm.Float(parameters.get_attribute('free_energy')) + return orm.Float(parameters.base.attributes.get('free_energy')) @calcfunction @@ -62,7 +62,7 @@ def get_total_magnetization(parameters): Return the free energy from the given parameters node. The free energy reported by CASTEP is the one that is consistent with the forces. """ - return orm.Float(parameters.get_attribute('spin_density')) + return orm.Float(parameters.base.attributes.get('spin_density')) class CastepCommonRelaxWorkChain(CommonRelaxWorkChain): diff --git a/aiida_common_workflows/workflows/relax/cp2k/__init__.py b/aiida_common_workflows/workflows/relax/cp2k/__init__.py index 0b3e6a05..d9e8ffd2 100644 --- a/aiida_common_workflows/workflows/relax/cp2k/__init__.py +++ b/aiida_common_workflows/workflows/relax/cp2k/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/cp2k/generator.py b/aiida_common_workflows/workflows/relax/cp2k/generator.py index faed11c0..81900dcc 100644 --- a/aiida_common_workflows/workflows/relax/cp2k/generator.py +++ b/aiida_common_workflows/workflows/relax/cp2k/generator.py @@ -15,8 +15,8 @@ __all__ = ('Cp2kCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') # pylint: disable=invalid-name -KpointsData = plugins.DataFactory('array.kpoints') # pylint: disable=invalid-name +StructureData = plugins.DataFactory('core.structure') # pylint: disable=invalid-name +KpointsData = plugins.DataFactory('core.array.kpoints') # pylint: disable=invalid-name EV_A3_TO_GPA = 160.21766208 @@ -334,7 +334,7 @@ def _get_kpoints(kpoints_distance, structure, reference_workchain): if reference_workchain and 'cp2k__kpoints' in reference_workchain.inputs: kpoints_mesh = KpointsData() kpoints_mesh.set_cell_from_structure(structure) - kpoints_mesh.set_kpoints_mesh(reference_workchain.inputs.cp2k__kpoints.get_attribute('mesh')) + kpoints_mesh.set_kpoints_mesh(reference_workchain.inputs.cp2k__kpoints.base.attributes.get('mesh')) return kpoints_mesh if kpoints_distance: diff --git a/aiida_common_workflows/workflows/relax/cp2k/workchain.py b/aiida_common_workflows/workflows/relax/cp2k/workchain.py index b958778e..86b87bc1 100644 --- a/aiida_common_workflows/workflows/relax/cp2k/workchain.py +++ b/aiida_common_workflows/workflows/relax/cp2k/workchain.py @@ -22,7 +22,7 @@ @calcfunction def get_total_energy(parameters): """Return the total energy from the given parameters node.""" - return orm.Float(parameters.get_attribute('energy') * HA_TO_EV) + return orm.Float(parameters.base.attributes.get('energy') * HA_TO_EV) @calcfunction diff --git a/aiida_common_workflows/workflows/relax/fleur/__init__.py b/aiida_common_workflows/workflows/relax/fleur/__init__.py index 714d30ad..125dc38e 100644 --- 
a/aiida_common_workflows/workflows/relax/fleur/__init__.py +++ b/aiida_common_workflows/workflows/relax/fleur/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/fleur/extractors.py b/aiida_common_workflows/workflows/relax/fleur/extractors.py index ea6b333d..5f80d1ae 100644 --- a/aiida_common_workflows/workflows/relax/fleur/extractors.py +++ b/aiida_common_workflows/workflows/relax/fleur/extractors.py @@ -25,7 +25,7 @@ def get_ts_energy(common_relax_workchain): if common_relax_workchain.process_class != WorkflowFactory('common_workflows.relax.fleur'): return ValueError('The input workchain is not a `FleurCommonRelaxWorkChain`') - fleur_relax_wc = common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node + fleur_relax_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node fleur_calc_out = fleur_relax_wc.outputs.last_scf.last_calc output_parameters = fleur_calc_out.output_parameters diff --git a/aiida_common_workflows/workflows/relax/fleur/generator.py b/aiida_common_workflows/workflows/relax/fleur/generator.py index 8245902e..6ef4089a 100644 --- a/aiida_common_workflows/workflows/relax/fleur/generator.py +++ b/aiida_common_workflows/workflows/relax/fleur/generator.py @@ -15,7 +15,7 @@ __all__ = ('FleurCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') class FleurCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -47,7 +47,7 @@ def __init__(self, *args, **kwargs): def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the configuration file.""" - with open(str(pathlib.Path(__file__).parent / 'protocol.yml')) as handle: + with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='utf-8') as handle: self._protocols = yaml.safe_load(handle) @classmethod diff --git a/aiida_common_workflows/workflows/relax/fleur/workchain.py b/aiida_common_workflows/workflows/relax/fleur/workchain.py index f05424ee..df945b4c 100644 --- a/aiida_common_workflows/workflows/relax/fleur/workchain.py +++ b/aiida_common_workflows/workflows/relax/fleur/workchain.py @@ -24,13 +24,13 @@ def get_forces_from_trajectory(trajectory): # pylint: disable=unused-argument @calcfunction def get_total_energy(parameters): """Calcfunction to get total energy from relax output""" - return orm.Float(parameters.get_attribute('energy')) + return orm.Float(parameters.base.attributes.get('energy')) @calcfunction def get_total_magnetization(parameters): """Return the total magnetic moment of the cell from the given parameters node.""" - return orm.Float(parameters.get_attribute('total_magnetic_moment_cell')) + return orm.Float(parameters.base.attributes.get('total_magnetic_moment_cell')) class FleurCommonRelaxWorkChain(CommonRelaxWorkChain): diff --git a/aiida_common_workflows/workflows/relax/gaussian/__init__.py b/aiida_common_workflows/workflows/relax/gaussian/__init__.py index 1993b70f..b6465874 100644 --- a/aiida_common_workflows/workflows/relax/gaussian/__init__.py +++ b/aiida_common_workflows/workflows/relax/gaussian/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git 
a/aiida_common_workflows/workflows/relax/gaussian/generator.py b/aiida_common_workflows/workflows/relax/gaussian/generator.py index df8ad0b3..1783f0a4 100644 --- a/aiida_common_workflows/workflows/relax/gaussian/generator.py +++ b/aiida_common_workflows/workflows/relax/gaussian/generator.py @@ -12,7 +12,7 @@ __all__ = ('GaussianCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') EV_TO_EH = 0.03674930814 ANG_TO_BOHR = 1.88972687 @@ -80,7 +80,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: magnetization_per_site = kwargs.get('magnetization_per_site', None) threshold_forces = kwargs.get('threshold_forces', None) - if any(structure.get_attribute_many(['pbc1', 'pbc2', 'pbc3'])): + if any(structure.base.attributes.get_many(['pbc1', 'pbc2', 'pbc3'])): print('Warning: PBC detected in input structure. It is not supported and thus ignored.') # ----------------------------------------------------------------- @@ -96,7 +96,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: link0_parameters['%mem'] = '2048MB' else: # If memory is set, specify 80% of it to gaussian - link0_parameters['%mem'] = '%dMB' % ((0.8 * options['max_memory_kb']) // 1024) + link0_parameters['%mem'] = '%dMB' % ((0.8 * options['max_memory_kb']) // 1024) # pylint: disable=consider-using-f-string) # Determine the number of processors that should be specified to Gaussian n_proc = None @@ -112,7 +112,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: n_proc = res['num_machines'] * def_mppm if n_proc is not None: - link0_parameters['%nprocshared'] = '%d' % n_proc + link0_parameters['%nprocshared'] = int(n_proc) # ----------------------------------------------------------------- # General route parameters @@ -131,7 +131,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: print('Warning: Forces threshold cannot be lower than 1e-6 au.') threshold_forces_au = 1e-6 threshold_forces_n = int(np.round(threshold_forces_au * 1e6)) - route_params['iop(1/7=%d)' % threshold_forces_n] = None + route_params[f'iop(1/7={threshold_forces_n})'] = None # ----------------------------------------------------------------- # Handle spin-polarization diff --git a/aiida_common_workflows/workflows/relax/generator.py b/aiida_common_workflows/workflows/relax/generator.py index e84061d0..808c8cfc 100644 --- a/aiida_common_workflows/workflows/relax/generator.py +++ b/aiida_common_workflows/workflows/relax/generator.py @@ -27,7 +27,7 @@ def define(cls, spec): super().define(spec) spec.input( 'structure', - valid_type=plugins.DataFactory('structure'), + valid_type=plugins.DataFactory('core.structure'), help='The structure whose geometry should be optimized.' 
) spec.input( diff --git a/aiida_common_workflows/workflows/relax/gpaw/__init__.py b/aiida_common_workflows/workflows/relax/gpaw/__init__.py index e69de29b..6b347a35 100644 --- a/aiida_common_workflows/workflows/relax/gpaw/__init__.py +++ b/aiida_common_workflows/workflows/relax/gpaw/__init__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# pylint: disable=undefined-variable +"""Module with the implementations of the common structure relaxation workchain for GPAW.""" +from .generator import * +from .workchain import * + +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/gpaw/generator.py b/aiida_common_workflows/workflows/relax/gpaw/generator.py index 25b72e57..60378c6b 100644 --- a/aiida_common_workflows/workflows/relax/gpaw/generator.py +++ b/aiida_common_workflows/workflows/relax/gpaw/generator.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- """Implementation of `aiida_common_workflows.common.relax.generator.CommonRelaxInputGenerator` for GPAW.""" +from __future__ import annotations + import pathlib -from typing import Any, Dict, List, Tuple, Union +from typing import Any, Dict, List, Tuple from aiida import engine, orm, plugins import yaml @@ -12,7 +14,7 @@ __all__ = ('GpawCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') class GpawCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -38,7 +40,7 @@ def __init__(self, *args, **kwargs): def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the protocols configuration file.""" - with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='UTF-8') as handle: + with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='utf-8') as handle: self._protocols = yaml.safe_load(handle) def _construct_builder( # pylint: disable=arguments-differ,too-many-locals @@ -47,10 +49,10 @@ def _construct_builder( # pylint: disable=arguments-differ,too-many-locals engines: Dict[str, Any], *, protocol: str = None, - relax_type: Union[RelaxType, str] = RelaxType.POSITIONS, - electronic_type: Union[ElectronicType, str] = ElectronicType.METAL, - spin_type: Union[SpinType, str] = SpinType.NONE, - magnetization_per_site: Union[List[float], Tuple[float]] = None, + relax_type: RelaxType | str = RelaxType.POSITIONS, + electronic_type: ElectronicType | str = ElectronicType.METAL, + spin_type: SpinType | str = SpinType.NONE, + magnetization_per_site: List[float] | Tuple[float] | None = None, threshold_forces: float = None, threshold_stress: float = None, reference_workchain=None, @@ -111,11 +113,13 @@ def _construct_builder( # pylint: disable=arguments-differ,too-many-locals if relax_type == RelaxType.NONE: parameters.pop('optimizer', {}) - kpoints = plugins.DataFactory('array.kpoints')() + kpoints = plugins.DataFactory('core.array.kpoints')() kpoints.set_cell_from_structure(structure) if reference_workchain: previous_kpoints = reference_workchain.inputs.kpoints - kpoints.set_kpoints_mesh(previous_kpoints.get_attribute('mesh'), previous_kpoints.get_attribute('offset')) + kpoints.set_kpoints_mesh( + previous_kpoints.base.attributes.get('mesh'), previous_kpoints.base.attributes.get('offset') + ) else: kpoints.set_kpoints_mesh_from_density(protocol['kpoint_distance']) diff --git a/aiida_common_workflows/workflows/relax/gpaw/workchain.py b/aiida_common_workflows/workflows/relax/gpaw/workchain.py index 5a229062..9cabbd78 100644 --- 
a/aiida_common_workflows/workflows/relax/gpaw/workchain.py +++ b/aiida_common_workflows/workflows/relax/gpaw/workchain.py @@ -21,7 +21,7 @@ def extract_forces_from_array(array): @calcfunction def extract_total_energy_from_parameters(parameters): """Return the total energy from the given parameters node.""" - energy_cont = parameters.get_attribute('energy_contributions') + energy_cont = parameters.base.attributes.get('energy_contributions') total_energy = energy_cont['xc'] + energy_cont['local'] + energy_cont['kinetic'] total_energy += energy_cont['external'] + energy_cont['potential'] + energy_cont['entropy (-st)'] return orm.Float(total_energy) diff --git a/aiida_common_workflows/workflows/relax/nwchem/__init__.py b/aiida_common_workflows/workflows/relax/nwchem/__init__.py index 5f27d24a..14c9f0c9 100644 --- a/aiida_common_workflows/workflows/relax/nwchem/__init__.py +++ b/aiida_common_workflows/workflows/relax/nwchem/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/nwchem/generator.py b/aiida_common_workflows/workflows/relax/nwchem/generator.py index 5a38082c..454bcbf0 100644 --- a/aiida_common_workflows/workflows/relax/nwchem/generator.py +++ b/aiida_common_workflows/workflows/relax/nwchem/generator.py @@ -14,7 +14,7 @@ __all__ = ('NwchemCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') HA_BOHR_TO_EV_A = 51.42208619083232 @@ -31,7 +31,7 @@ def __init__(self, *args, **kwargs): def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the configuration file.""" - with open(pathlib.Path(__file__).parent / 'protocol.yml') as handle: + with open(pathlib.Path(__file__).parent / 'protocol.yml', encoding='utf-8') as handle: self._protocols = yaml.safe_load(handle) @classmethod @@ -78,7 +78,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: else: reciprocal_axes_lengths = np.linalg.norm(np.linalg.inv(structure.cell), axis=1) kpoints = np.ceil(reciprocal_axes_lengths / target_spacing).astype(int).tolist() - parameters['nwpw']['monkhorst-pack'] = '{} {} {}'.format(*kpoints) + parameters['nwpw']['monkhorst-pack'] = '{} {} {}'.format(*kpoints) # pylint: disable=consider-using-f-string # Relaxation type if relax_type == RelaxType.POSITIONS: diff --git a/aiida_common_workflows/workflows/relax/nwchem/workchain.py b/aiida_common_workflows/workflows/relax/nwchem/workchain.py index 585b0083..60637412 100644 --- a/aiida_common_workflows/workflows/relax/nwchem/workchain.py +++ b/aiida_common_workflows/workflows/relax/nwchem/workchain.py @@ -33,7 +33,7 @@ def get_forces(parameters): """Return the forces array [eV/ang] from the output parameters node.""" forces_au = np.array(parameters['final_energy']['forces'], dtype=float) forces_ev = orm.ArrayData() - forces_ev.set_array(name='forces', array=(forces_au * HA_BOHR_TO_EV_A)) + forces_ev.set_array(name='forces', array=forces_au * HA_BOHR_TO_EV_A) return forces_ev diff --git a/aiida_common_workflows/workflows/relax/orca/__init__.py b/aiida_common_workflows/workflows/relax/orca/__init__.py index 1efe199e..fc7f1d1c 100644 --- a/aiida_common_workflows/workflows/relax/orca/__init__.py +++ b/aiida_common_workflows/workflows/relax/orca/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + 
workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/orca/generator.py b/aiida_common_workflows/workflows/relax/orca/generator.py index f0173140..28526deb 100644 --- a/aiida_common_workflows/workflows/relax/orca/generator.py +++ b/aiida_common_workflows/workflows/relax/orca/generator.py @@ -16,7 +16,7 @@ __all__ = ('OrcaCommonRelaxInputGenerator',) -StructureData = DataFactory('structure') +StructureData = DataFactory('core.structure') class OrcaCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -43,7 +43,7 @@ def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the configuration file.""" yamlpath = os.path.join(os.path.dirname(__file__), 'protocol.yml') - with open(yamlpath) as handler: + with open(yamlpath, encoding='utf-8') as handler: self._protocols = yaml.safe_load(handler) @classmethod @@ -72,7 +72,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: magnetization_per_site = kwargs.get('magnetization_per_site', None) # Checks - if any(structure.get_attribute_many(['pbc1', 'pbc2', 'pbc2'])): + if any(structure.base.attributes.get_many(['pbc1', 'pbc2', 'pbc2'])): warnings.warn('PBC detected in the input structure. It is not supported and thus is ignored.') if protocol not in self.get_protocol_names(): diff --git a/aiida_common_workflows/workflows/relax/quantum_espresso/__init__.py b/aiida_common_workflows/workflows/relax/quantum_espresso/__init__.py index 987321e9..7b873bc8 100644 --- a/aiida_common_workflows/workflows/relax/quantum_espresso/__init__.py +++ b/aiida_common_workflows/workflows/relax/quantum_espresso/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/quantum_espresso/extractors.py b/aiida_common_workflows/workflows/relax/quantum_espresso/extractors.py index 804fc199..4e247bee 100644 --- a/aiida_common_workflows/workflows/relax/quantum_espresso/extractors.py +++ b/aiida_common_workflows/workflows/relax/quantum_espresso/extractors.py @@ -27,5 +27,5 @@ def get_ts_energy(common_relax_workchain: QuantumEspressoCommonRelaxWorkChain) - if common_relax_workchain.process_class != QuantumEspressoCommonRelaxWorkChain: return ValueError('The input workchain is not a `QuantumEspressoCommonRelaxWorkChain`') - qe_relax_wc = common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node + qe_relax_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node return -qe_relax_wc.outputs.output_parameters['energy_smearing'] diff --git a/aiida_common_workflows/workflows/relax/quantum_espresso/generator.py b/aiida_common_workflows/workflows/relax/quantum_espresso/generator.py index 1edc9169..2f615c39 100644 --- a/aiida_common_workflows/workflows/relax/quantum_espresso/generator.py +++ b/aiida_common_workflows/workflows/relax/quantum_espresso/generator.py @@ -13,7 +13,7 @@ __all__ = ('QuantumEspressoCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') def create_magnetic_allotrope(structure, magnetization_per_site): @@ -197,7 +197,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: builder.base['pw']['parameters'] = orm.Dict(dict=parameters) if reference_workchain: - relax = 
reference_workchain.get_outgoing(node_class=orm.WorkChainNode).one().node + relax = reference_workchain.base.links.get_outgoing(node_class=orm.WorkChainNode).one().node base = sorted(relax.called, key=lambda x: x.ctime)[-1] calc = sorted(base.called, key=lambda x: x.ctime)[-1] kpoints = calc.inputs.kpoints diff --git a/aiida_common_workflows/workflows/relax/quantum_espresso/workchain.py b/aiida_common_workflows/workflows/relax/quantum_espresso/workchain.py index 02980277..302b14ba 100644 --- a/aiida_common_workflows/workflows/relax/quantum_espresso/workchain.py +++ b/aiida_common_workflows/workflows/relax/quantum_espresso/workchain.py @@ -30,8 +30,8 @@ def extract_from_trajectory(trajectory): @calcfunction def extract_from_parameters(parameters): """Return the total energy and optionally the total magnetization from the given parameters node.""" - total_energy = parameters.get_attribute('energy') - total_magnetization = parameters.get_attribute('total_magnetization', None) + total_energy = parameters.base.attributes.get('energy') + total_magnetization = parameters.base.attributes.get('total_magnetization', None) results = {'total_energy': orm.Float(total_energy)} @@ -55,7 +55,7 @@ def convert_outputs(self): forces, stress = extract_from_trajectory(outputs.output_trajectory).values() try: - total_energy, total_magnetization = result + total_energy, total_magnetization = result # pylint: disable=unbalanced-dict-unpacking except ValueError: total_energy, total_magnetization = list(result)[0], None diff --git a/aiida_common_workflows/workflows/relax/siesta/__init__.py b/aiida_common_workflows/workflows/relax/siesta/__init__.py index de50e8fd..8475c406 100644 --- a/aiida_common_workflows/workflows/relax/siesta/__init__.py +++ b/aiida_common_workflows/workflows/relax/siesta/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/siesta/extractors.py b/aiida_common_workflows/workflows/relax/siesta/extractors.py index c4eef2de..37d2bfea 100644 --- a/aiida_common_workflows/workflows/relax/siesta/extractors.py +++ b/aiida_common_workflows/workflows/relax/siesta/extractors.py @@ -22,7 +22,7 @@ def get_ts_energy(common_relax_workchain): if common_relax_workchain.process_class != WorkflowFactory('common_workflows.relax.siesta'): return ValueError('The input workchain is not a `CommonWorkflowSiestaWorkChain`') - siesta_base_wc = common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node + siesta_base_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node e_ks = siesta_base_wc.outputs.output_parameters['E_KS'] free_e = siesta_base_wc.outputs.output_parameters['FreeE'] diff --git a/aiida_common_workflows/workflows/relax/siesta/generator.py b/aiida_common_workflows/workflows/relax/siesta/generator.py index d215e2aa..c6fcf922 100644 --- a/aiida_common_workflows/workflows/relax/siesta/generator.py +++ b/aiida_common_workflows/workflows/relax/siesta/generator.py @@ -13,7 +13,7 @@ __all__ = ('SiestaCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') class SiestaCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -54,7 +54,7 @@ def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the configuration file.""" _filepath = 
os.path.join(os.path.dirname(__file__), 'protocol.yml') - with open(_filepath) as _thefile: + with open(_filepath, encoding='utf-8') as _thefile: self._protocols = yaml.full_load(_thefile) @classmethod @@ -98,7 +98,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: pseudo_family = self._protocols[protocol]['pseudo_family'] try: - orm.Group.objects.get(label=pseudo_family) + orm.Group.collection.get(label=pseudo_family) except exceptions.NotExistent as exc: raise ValueError( f'protocol `{protocol}` requires `pseudo_family` with name {pseudo_family} ' @@ -213,8 +213,8 @@ def _get_param(self, key, structure, reference_workchain): # pylint: disable=to #the underline SiestaBaseWorkChain. if reference_workchain is not None: from aiida.orm import WorkChainNode - siesta_base_outs = reference_workchain.get_outgoing(node_class=WorkChainNode).one().node.outputs - mesh = siesta_base_outs.output_parameters.attributes['mesh'] + siesta_base_outs = reference_workchain.base.links.get_outgoing(node_class=WorkChainNode).one().node.outputs + mesh = siesta_base_outs.output_parameters.base.attributes.get('mesh') parameters['mesh-sizes'] = f'[{mesh[0]} {mesh[1]} {mesh[2]}]' parameters.pop('mesh-cutoff', None) @@ -281,7 +281,9 @@ def _get_kpoints(self, key, structure, reference_workchain): kpoints_mesh = KpointsData() kpoints_mesh.set_cell_from_structure(structure) previous_wc_kp = reference_workchain.inputs.kpoints - kpoints_mesh.set_kpoints_mesh(previous_wc_kp.get_attribute('mesh'), previous_wc_kp.get_attribute('offset')) + kpoints_mesh.set_kpoints_mesh( + previous_wc_kp.base.attributes.get('mesh'), previous_wc_kp.base.attributes.get('offset') + ) return kpoints_mesh if 'kpoints' in self._protocols[key]: diff --git a/aiida_common_workflows/workflows/relax/siesta/workchain.py b/aiida_common_workflows/workflows/relax/siesta/workchain.py index 2b6712ad..d1ef28bd 100644 --- a/aiida_common_workflows/workflows/relax/siesta/workchain.py +++ b/aiida_common_workflows/workflows/relax/siesta/workchain.py @@ -57,6 +57,6 @@ def convert_outputs(self): res_dict = get_forces_and_stress(self.ctx.workchain.outputs.forces_and_stress) self.out('forces', res_dict['forces']) self.out('stress', res_dict['stress']) - if 'stot' in self.ctx.workchain.outputs.output_parameters.attributes: + if 'stot' in self.ctx.workchain.outputs.output_parameters.base.attributes.all: self.out('total_magnetization', get_magn(self.ctx.workchain.outputs.output_parameters)) self.out('remote_folder', self.ctx.workchain.outputs.remote_folder) diff --git a/aiida_common_workflows/workflows/relax/vasp/__init__.py b/aiida_common_workflows/workflows/relax/vasp/__init__.py index 1558ec0d..f503e9aa 100644 --- a/aiida_common_workflows/workflows/relax/vasp/__init__.py +++ b/aiida_common_workflows/workflows/relax/vasp/__init__.py @@ -4,4 +4,4 @@ from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/vasp/extractors.py b/aiida_common_workflows/workflows/relax/vasp/extractors.py index 9fe46478..f56ec5eb 100644 --- a/aiida_common_workflows/workflows/relax/vasp/extractors.py +++ b/aiida_common_workflows/workflows/relax/vasp/extractors.py @@ -19,7 +19,7 @@ def get_ts_energy(common_relax_workchain): if common_relax_workchain.process_class != WorkflowFactory('common_workflows.relax.vasp'): return ValueError('The input workchain is not a `VaspCommonRelaxWorkChain`') - vasp_wc = 
common_relax_workchain.get_outgoing(link_type=LinkType.CALL_WORK).one().node + vasp_wc = common_relax_workchain.base.links.get_outgoing(link_type=LinkType.CALL_WORK).one().node energies = vasp_wc.outputs.energies energy_free = energies.get_array('energy_free_electronic')[0] energy_no_entropy = energies.get_array('energy_no_entropy')[0] diff --git a/aiida_common_workflows/workflows/relax/vasp/generator.py b/aiida_common_workflows/workflows/relax/vasp/generator.py index 970026e8..2b98fcea 100644 --- a/aiida_common_workflows/workflows/relax/vasp/generator.py +++ b/aiida_common_workflows/workflows/relax/vasp/generator.py @@ -13,7 +13,7 @@ __all__ = ('VaspCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') class VaspCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -43,12 +43,12 @@ def __init__(self, *args, **kwargs): def _initialize_protocols(self): """Initialize the protocols class attribute by parsing them from the protocols configuration file.""" - with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='UTF-8') as handle: + with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='utf-8') as handle: self._protocols = yaml.safe_load(handle) def _initialize_potential_mapping(self): """Initialize the potential mapping from the potential_mapping configuration file.""" - with open(str(pathlib.Path(__file__).parent / 'potential_mapping.yml'), encoding='UTF-8') as handle: + with open(str(pathlib.Path(__file__).parent / 'potential_mapping.yml'), encoding='utf-8') as handle: self._potential_mapping = yaml.safe_load(handle) @classmethod @@ -95,10 +95,10 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: builder.structure = structure # Set options - builder.options = plugins.DataFactory('dict')(dict=engines['relax']['options']) + builder.options = plugins.DataFactory('core.dict')(dict=engines['relax']['options']) # Set workchain related inputs, in this case, give more explicit output to report - builder.verbose = plugins.DataFactory('bool')(True) + builder.verbose = plugins.DataFactory('core.bool')(True) # Fetch initial parameters from the protocol file. # Here we set the protocols fast, moderate and precise. These currently have no formal meaning. 
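The VASP generator hunks here, like the other generators in this diff, also pick up the second recurring rename: the entry points of the data plugins shipped with aiida-core gain a `core.` prefix in 2.x. A short hedged illustration of the factory calls (assumes a configured profile; the mesh value is arbitrary and only for demonstration):

```python
# Sketch (not part of the diff): 'core.'-prefixed entry points for aiida-core's own data plugins.
from aiida import load_profile, plugins

load_profile()

StructureData = plugins.DataFactory('core.structure')    # 1.x name: 'structure'
Dict = plugins.DataFactory('core.dict')                  # 1.x name: 'dict'
KpointsData = plugins.DataFactory('core.array.kpoints')  # 1.x name: 'array.kpoints'

kpoints = KpointsData()
kpoints.set_kpoints_mesh([4, 4, 4])  # arbitrary example mesh
```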
@@ -160,7 +160,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: 'energy_type': ['energy_free', 'energy_no_entropy'] } }) - builder.settings = plugins.DataFactory('dict')(dict=settings) + builder.settings = plugins.DataFactory('core.dict')(dict=settings) # Configure the handlers handler_overrides = { @@ -171,24 +171,26 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: 'handler_unfinished_calc_generic': False, 'handler_electronic_conv': False } - builder.handler_overrides = plugins.DataFactory('dict')(dict=handler_overrides) + builder.handler_overrides = plugins.DataFactory('core.dict')(dict=handler_overrides) # Set the parameters on the builder, put it in the code namespace to pass through # to the code inputs - builder.parameters = plugins.DataFactory('dict')(dict={'incar': parameters_dict}) + builder.parameters = plugins.DataFactory('core.dict')(dict={'incar': parameters_dict}) # Set potentials and their mapping builder.potential_family = plugins.DataFactory('str')(protocol['potential_family']) - builder.potential_mapping = plugins.DataFactory('dict')( + builder.potential_mapping = plugins.DataFactory('core.dict')( dict=self._potential_mapping[protocol['potential_mapping']] ) # Set the kpoint grid from the density in the protocol - kpoints = plugins.DataFactory('array.kpoints')() + kpoints = plugins.DataFactory('core.array.kpoints')() kpoints.set_cell_from_structure(structure) if reference_workchain: previous_kpoints = reference_workchain.inputs.kpoints - kpoints.set_kpoints_mesh(previous_kpoints.get_attribute('mesh'), previous_kpoints.get_attribute('offset')) + kpoints.set_kpoints_mesh( + previous_kpoints.base.attributes.get('mesh'), previous_kpoints.base.attributes.get('offset') + ) else: kpoints.set_kpoints_mesh_from_density(protocol['kpoint_distance']) builder.kpoints = kpoints @@ -197,36 +199,36 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: relax = AttributeDict() if relax_type != RelaxType.NONE: # Perform relaxation of cell or positions - relax.perform = plugins.DataFactory('bool')(True) + relax.perform = plugins.DataFactory('core.bool')(True) relax.algo = plugins.DataFactory('str')(protocol['relax']['algo']) relax.steps = plugins.DataFactory('int')(protocol['relax']['steps']) if relax_type == RelaxType.POSITIONS: - relax.positions = plugins.DataFactory('bool')(True) - relax.shape = plugins.DataFactory('bool')(False) - relax.volume = plugins.DataFactory('bool')(False) + relax.positions = plugins.DataFactory('core.bool')(True) + relax.shape = plugins.DataFactory('core.bool')(False) + relax.volume = plugins.DataFactory('core.bool')(False) elif relax_type == RelaxType.CELL: - relax.positions = plugins.DataFactory('bool')(False) - relax.shape = plugins.DataFactory('bool')(True) - relax.volume = plugins.DataFactory('bool')(True) + relax.positions = plugins.DataFactory('core.bool')(False) + relax.shape = plugins.DataFactory('core.bool')(True) + relax.volume = plugins.DataFactory('core.bool')(True) elif relax_type == RelaxType.VOLUME: - relax.positions = plugins.DataFactory('bool')(False) - relax.shape = plugins.DataFactory('bool')(False) - relax.volume = plugins.DataFactory('bool')(True) + relax.positions = plugins.DataFactory('core.bool')(False) + relax.shape = plugins.DataFactory('core.bool')(False) + relax.volume = plugins.DataFactory('core.bool')(True) elif relax_type == RelaxType.SHAPE: - relax.positions = plugins.DataFactory('bool')(False) - relax.shape = plugins.DataFactory('bool')(True) - relax.volume = 
plugins.DataFactory('bool')(False) + relax.positions = plugins.DataFactory('core.bool')(False) + relax.shape = plugins.DataFactory('core.bool')(True) + relax.volume = plugins.DataFactory('core.bool')(False) elif relax_type == RelaxType.POSITIONS_CELL: - relax.positions = plugins.DataFactory('bool')(True) - relax.shape = plugins.DataFactory('bool')(True) - relax.volume = plugins.DataFactory('bool')(True) + relax.positions = plugins.DataFactory('core.bool')(True) + relax.shape = plugins.DataFactory('core.bool')(True) + relax.volume = plugins.DataFactory('core.bool')(True) elif relax_type == RelaxType.POSITIONS_SHAPE: - relax.positions = plugins.DataFactory('bool')(True) - relax.shape = plugins.DataFactory('bool')(True) - relax.volume = plugins.DataFactory('bool')(False) + relax.positions = plugins.DataFactory('core.bool')(True) + relax.shape = plugins.DataFactory('core.bool')(True) + relax.volume = plugins.DataFactory('core.bool')(False) else: # Do not perform any relaxation - relax.perform = plugins.DataFactory('bool')(False) + relax.perform = plugins.DataFactory('core.bool')(False) if threshold_forces is not None: threshold = threshold_forces diff --git a/aiida_common_workflows/workflows/relax/wien2k/__init__.py b/aiida_common_workflows/workflows/relax/wien2k/__init__.py index de50e8fd..848e8bd6 100644 --- a/aiida_common_workflows/workflows/relax/wien2k/__init__.py +++ b/aiida_common_workflows/workflows/relax/wien2k/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # pylint: disable=undefined-variable -"""Module with the implementations of the common structure relaxation workchain for Siesta.""" +"""Module with the implementations of the common structure relaxation workchain for Wien2k.""" from .generator import * from .workchain import * -__all__ = (generator.__all__ + workchain.__all__) +__all__ = generator.__all__ + workchain.__all__ diff --git a/aiida_common_workflows/workflows/relax/wien2k/generator.py b/aiida_common_workflows/workflows/relax/wien2k/generator.py index cab405da..cd4109e2 100644 --- a/aiida_common_workflows/workflows/relax/wien2k/generator.py +++ b/aiida_common_workflows/workflows/relax/wien2k/generator.py @@ -12,7 +12,7 @@ __all__ = ('Wien2kCommonRelaxInputGenerator',) -StructureData = plugins.DataFactory('structure') +StructureData = plugins.DataFactory('core.structure') class Wien2kCommonRelaxInputGenerator(CommonRelaxInputGenerator): @@ -84,7 +84,7 @@ def _construct_builder(self, **kwargs) -> engine.ProcessBuilder: inpdict['-nometal'] = True if reference_workchain: # ref. workchain is passed as input # derive Rmt's from the ref. workchain and pass as input - w2k_wchain = reference_workchain.get_outgoing(node_class=orm.WorkChainNode).one().node + w2k_wchain = reference_workchain.base.links.get_outgoing(node_class=orm.WorkChainNode).one().node ref_wrkchn_res_dict = w2k_wchain.outputs.workchain_result.get_dict() rmt = ref_wrkchn_res_dict['Rmt'] atm_lbl = ref_wrkchn_res_dict['atom_labels'] diff --git a/docs/source/workflows/composite/dc.rst b/docs/source/workflows/composite/dc.rst index 8166bacc..3719da39 100644 --- a/docs/source/workflows/composite/dc.rst +++ b/docs/source/workflows/composite/dc.rst @@ -103,7 +103,7 @@ A template script to retrieve the results follows: node = load_node() # is an identifier (PK, uuid, ..) 
-    outputs = node.get_outgoing(link_type=LinkType.RETURN).nested()
+    outputs = node.base.links.get_outgoing(link_type=LinkType.RETURN).nested()
     distances = []
     energies = []
diff --git a/docs/source/workflows/composite/eos.rst b/docs/source/workflows/composite/eos.rst
index 121139be..c0089cc6 100644
--- a/docs/source/workflows/composite/eos.rst
+++ b/docs/source/workflows/composite/eos.rst
@@ -103,7 +103,7 @@ A template script to retrieve the results follows:

     node = load_node()  # is an identifier (PK, uuid, ..) of a completed EoS workchain
-    outputs = node.get_outgoing(link_type=LinkType.RETURN).nested()
+    outputs = node.base.links.get_outgoing(link_type=LinkType.RETURN).nested()
     volumes = []
     energies = []
diff --git a/pyproject.toml b/pyproject.toml
index 812ed060..8b20617d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,34 +15,32 @@ classifiers = [
     'Operating System :: POSIX :: Linux',
     'Operating System :: MacOS :: MacOS X',
     'Programming Language :: Python',
-    'Programming Language :: Python :: 3.8',
     'Programming Language :: Python :: 3.9',
 ]
 keywords = ['aiida', 'workflows']
-requires-python = '>=3.8,<3.10'
+requires-python = '>=3.9'
 dependencies = [
     'abipy==0.9.6',
-    'aiida-abinit~=0.4.0',
-    'aiida-bigdft>=0.2.6',
-    'aiida-castep>=1.2.0a5',
-    'aiida-core[atomic_tools]~=1.6',
-    'aiida-cp2k~=1.6',
-    'aiida-fleur>=1.3.0',
-    'aiida-gaussian',
-    'aiida-nwchem>=2.1.0',
-    'aiida-orca',
-    'aiida-pseudo==0.6.5',
-    'aiida-quantumespresso~=3.4,>=3.4.1',
-    'aiida-siesta>=1.2.0',
-    'aiida-vasp~=2.2',
-    'aiida-wien2k~=0.1.1',
-    'aiida-ase',
-    'pymatgen>=2022.1.20',
-    'numpy<1.24.0',
-    'sqlalchemy<1.4',
+    'aiida-ase~=3.0',
+    'aiida-abinit~=0.5.0',
+    'aiida-bigdft~=0.3.0',
+    'aiida-castep~=2.0',
+    'aiida-core[atomic_tools]~=2.1',
+    'aiida-cp2k~=2.0',
+    'aiida-fleur~=2.0',
+    'aiida-gaussian~=2.0',
+    'aiida-nwchem~=3.0',
+    'aiida-orca~=0.6.0',
+    'aiida-pseudo~=1.0',
+    'aiida-quantumespresso~=4.4',
+    'aiida-siesta~=2.0',
+    'aiida-vasp~=3.1',
+    'aiida-wien2k~=0.2.0',
     'ase!=3.20.*',
+    'masci-tools~=0.9',
+    'numpy<1.24.0',
     'pint~=0.16',
-    'masci-tools~=0.9'
+    'pymatgen>=2022.1.20',
 ]

 [project.urls]
@@ -62,7 +60,7 @@ docs = [
 ]
 pre-commit = [
     'pre-commit~=2.2',
-    'pylint~=2.5.2',
+    'pylint~=2.16.0',
 ]
 tests = [
     'pytest~=7.2',
@@ -129,11 +127,10 @@ disable = [
     'import-outside-toplevel',
     'inconsistent-return-statements',
     'too-many-arguments',
-    'bad-continuation',
     'duplicate-code',
     'no-member',
     'too-few-public-methods',
-    "wrong-import-order"
+    'wrong-import-order'
 ]

 [tool.pytest.ini_options]
diff --git a/tests/cli/test_launch.py b/tests/cli/test_launch.py
index f618778e..fea7e9db 100644
--- a/tests/cli/test_launch.py
+++ b/tests/cli/test_launch.py
@@ -141,8 +141,7 @@ def test_eos_relax_types(run_cli_command, generate_structure, generate_code):
     # Test that a non-sensical relax type raises
     options = ['-S', str(structure.pk), '-r', 'cell', 'quantum_espresso']
     result = run_cli_command(launch.cmd_eos, options, raises=click.BadParameter)
-    assert "Error: Invalid value for '-r' / '--relax-type': invalid choice: cell. " \
-        '(choose from none, positions, shape, positions_shape)' in result.output_lines
+    assert "Error: Invalid value for '-r' / '--relax-type': 'cell' is not one of " in result.output


 def test_dissociation_curve_wallclock_seconds(run_cli_command, generate_structure, generate_code):
@@ -205,5 +204,4 @@ def test_relax_magn_per_type(run_cli_command, generate_structure, generate_code)
     # Test that only `float` are admissible
     options = ['-S', str(structure.pk), '--magnetization-per-site', 'str', '--', 'quantum_espresso']
     result = run_cli_command(launch.cmd_relax, options, raises=click.BadParameter)
-    assert "Error: Invalid value for '--magnetization-per-site': str is not a valid floating point " \
-        'value' in result.output
+    assert "Error: Invalid value for '--magnetization-per-site': 'str' is not a valid float." in result.output
diff --git a/tests/cli/test_options.py b/tests/cli/test_options.py
index 3306706e..63ba696f 100644
--- a/tests/cli/test_options.py
+++ b/tests/cli/test_options.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# pylint: disable=redefined-outer-name,no-self-use
+# pylint: disable=redefined-outer-name
 """Tests for the :mod:`aiida_common_workflows.cli.launch` module."""
 import json
 import pathlib
diff --git a/tests/conftest.py b/tests/conftest.py
index acfaa382..976f71e9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -3,6 +3,7 @@
 """Configuration and fixtures for unit test suite."""
 import io
 import os
+import pathlib
 import tempfile

 from aiida import engine
@@ -23,7 +24,7 @@ def with_database(aiida_profile):
 @pytest.fixture
 def with_clean_database(with_database):
     """Fixture to clear the database before yielding to the test."""
-    with_database.reset_db()
+    with_database.clear_profile()

     yield
@@ -105,7 +106,8 @@ def generate_structure():
     def _generate_structure(symbols=None):
         from aiida.plugins import DataFactory

-        structure = DataFactory('structure')()
+        structure = DataFactory('core.structure')()
+        structure.set_cell([[1, 0, 0], [0, 1, 0], [0, 0, 1]])  # Set an arbitrary cell so the volume is not zero

         valid_symbols = [value['symbol'] for value in elements.values()]
@@ -134,8 +136,8 @@ def _generate_code(entry_point):
         aiida_localhost.set_default_mpiprocs_per_machine(1)
         label = ''.join(random.choice(string.ascii_letters) for _ in range(16))

-        code = DataFactory('code')(
-            label=label, input_plugin_name=entry_point, remote_computer_exec=[aiida_localhost, '/bin/bash']
+        code = DataFactory('core.code.installed')(
+            label=label, default_calc_job_plugin=entry_point, computer=aiida_localhost, filepath_executable='/bin/bash'
         )

         return code
@@ -178,15 +180,17 @@ def _generate_eos_node(include_magnetization=True, include_energy=True):
         for index in range(5):
             structure = generate_structure().store()
-            structure.add_incoming(node, link_type=LinkType.RETURN, link_label=f'structures__{index}')
+            structure.base.links.add_incoming(node, link_type=LinkType.RETURN, link_label=f'structures__{index}')

             if include_energy:
                 energy = Float(index).store()
-                energy.add_incoming(node, link_type=LinkType.RETURN, link_label=f'total_energies__{index}')
+                energy.base.links.add_incoming(node, link_type=LinkType.RETURN, link_label=f'total_energies__{index}')

             if include_magnetization:
                 magnetization = Float(index).store()
-                magnetization.add_incoming(node, link_type=LinkType.RETURN, link_label=f'total_magnetizations__{index}')
+                magnetization.base.links.add_incoming(
+                    node, link_type=LinkType.RETURN, link_label=f'total_magnetizations__{index}'
+                )

         node.set_exit_status(0)
@@ -207,16 +211,18 @@ def _generate_dissociation_curve_node(include_magnetization=True, include_energy
         for index in range(5):
             distance = Float(index / 10).store()
-            distance.add_incoming(node, link_type=LinkType.RETURN, link_label=f'distances__{index}')
+            distance.base.links.add_incoming(node, link_type=LinkType.RETURN, link_label=f'distances__{index}')

             # `include_energy` can be set to False to test cases with missing outputs
             if include_energy:
                 energy = Float(index).store()
-                energy.add_incoming(node, link_type=LinkType.RETURN, link_label=f'total_energies__{index}')
+                energy.base.links.add_incoming(node, link_type=LinkType.RETURN, link_label=f'total_energies__{index}')

             if include_magnetization:
                 magnetization = Float(index).store()
-                magnetization.add_incoming(node, link_type=LinkType.RETURN, link_label=f'total_magnetizations__{index}')
+                magnetization.base.links.add_incoming(
+                    node, link_type=LinkType.RETURN, link_label=f'total_magnetizations__{index}'
+                )

         node.set_exit_status(0)
@@ -252,7 +258,7 @@ def _generate_psml_data(element):
         content = dedent(
             f"""
-
+
             """
         )
@@ -306,10 +312,10 @@ def sssp(generate_upf_data):
     from aiida.plugins import GroupFactory

     SsspFamily = GroupFactory('pseudo.family.sssp')  # pylint: disable=invalid-name

-    label = 'SSSP/1.1/PBE/efficiency'
+    label = 'SSSP/1.2/PBEsol/efficiency'

     try:
-        family = SsspFamily.objects.get(label=label)
+        family = SsspFamily.collection.get(label=label)
     except exceptions.NotExistent:
         pass
     else:
@@ -331,7 +337,7 @@ def sssp(generate_upf_data):
         cutoffs_dict['normal'][element] = {'cutoff_wfc': 30., 'cutoff_rho': 240.}

-    family = SsspFamily.create_from_folder(dirpath, label)
+    family = SsspFamily.create_from_folder(pathlib.Path(dirpath), label)

     for stringency, cutoffs in cutoffs_dict.items():
         family.set_cutoffs(cutoffs, stringency, unit='Ry')
@@ -348,7 +354,7 @@ def pseudo_dojo_jthxml_family(generate_jthxml_data):
     label = 'PseudoDojo/1.0/PBE/SR/standard/jthxml'

     try:
-        family = PseudoDojoFamily.objects.get(label=label)
+        family = PseudoDojoFamily.collection.get(label=label)
     except exceptions.NotExistent:
         pass
     else:
@@ -370,7 +376,9 @@ def pseudo_dojo_jthxml_family(generate_jthxml_data):
         cutoffs_dict['normal'][element] = {'cutoff_wfc': 30., 'cutoff_rho': 240.}

-    family = PseudoDojoFamily.create_from_folder(dirpath, label, pseudo_type=plugins.DataFactory('pseudo.jthxml'))
+    family = PseudoDojoFamily.create_from_folder(
+        pathlib.Path(dirpath), label, pseudo_type=plugins.DataFactory('pseudo.jthxml')
+    )

     for stringency, cutoffs in cutoffs_dict.items():
         family.set_cutoffs(cutoffs, stringency, unit='Eh')
@@ -387,7 +395,7 @@ def pseudo_dojo_psp8_family(generate_psp8_data): # pylint: disable=too-many-loc
     label = 'PseudoDojo/0.41/PBE/SR/standard/psp8'

     try:
-        family = PseudoDojoFamily.objects.get(label=label)
+        family = PseudoDojoFamily.collection.get(label=label)
     except exceptions.NotExistent:
         pass
     else:
@@ -409,7 +417,9 @@ def pseudo_dojo_psp8_family(generate_psp8_data): # pylint: disable=too-many-loc
         cutoffs_dict['normal'][element] = {'cutoff_wfc': 30., 'cutoff_rho': 240.}

-    family = PseudoDojoFamily.create_from_folder(dirpath, label, pseudo_type=plugins.DataFactory('pseudo.psp8'))
+    family = PseudoDojoFamily.create_from_folder(
+        pathlib.Path(dirpath), label, pseudo_type=plugins.DataFactory('pseudo.psp8')
+    )

     for stringency, cutoffs in cutoffs_dict.items():
         family.set_cutoffs(cutoffs, stringency, unit='Eh')
@@ -427,7 +437,7 @@ def psml_family(generate_psml_data):
     label = 'PseudoDojo/0.4/PBE/FR/standard/psml'

     try:
-        family = PseudoPotentialFamily.objects.get(label=label)
+        family = PseudoPotentialFamily.collection.get(label=label)
     except exceptions.NotExistent:
         pass
     else:
@@ -445,6 +455,6 @@ def psml_family(generate_psml_data):
             handle.write(source.read())
             handle.flush()

-    family = PseudoPotentialFamily.create_from_folder(dirpath, label, pseudo_type=PsmlData)
+    family = PseudoPotentialFamily.create_from_folder(pathlib.Path(dirpath), label, pseudo_type=PsmlData)

     return family
diff --git a/tests/workflows/bands/test_implementations.py b/tests/workflows/bands/test_implementations.py
index d398dd0b..bd8f42cc 100644
--- a/tests/workflows/bands/test_implementations.py
+++ b/tests/workflows/bands/test_implementations.py
@@ -22,7 +22,7 @@ def test_spec(workchain):
     required_ports = {
         'bands_kpoints': {
-            'valid_type': plugins.DataFactory('array.kpoints')
+            'valid_type': plugins.DataFactory('core.array.kpoints')
         },
         'parent_folder': {
             'valid_type': orm.RemoteData
diff --git a/tests/workflows/relax/test_castep.py b/tests/workflows/relax/test_castep.py
index f3035de3..f6ec4db1 100644
--- a/tests/workflows/relax/test_castep.py
+++ b/tests/workflows/relax/test_castep.py
@@ -132,7 +132,7 @@ def test_calc_generator(nacl, castep_code, with_otfg):
         }
     }
     override = {'calc': {'parameters': {'cut_off_energy': 220}}}
-    otfg = OTFGGroup.objects.get(label='C19')
+    otfg = OTFGGroup.collection.get(label='C19')
     generated = generate_inputs_calculation(protcol, castep_code, nacl, otfg, override)

     assert 'structure' in generated
@@ -154,7 +154,7 @@ def test_base_generator(castep_code, nacl, with_otfg):
         }
     }
     override = {'calc': {'parameters': {'cut_off_energy': 220}, 'metadata': {'label': 'test'}}}
-    otfg = OTFGGroup.objects.get(label='C19')
+    otfg = OTFGGroup.collection.get(label='C19')
     generated = generate_inputs_base(protcol, castep_code, nacl, otfg, override)

     assert 'structure' in generated['calc']
@@ -185,7 +185,7 @@ def test_relax_generator(castep_code, nacl, with_otfg):
             }
         }
     }
-    otfg = OTFGGroup.objects.get(label='C19')
+    otfg = OTFGGroup.collection.get(label='C19')
     generated = generate_inputs_relax(protocol, castep_code, nacl, otfg, override)

     assert 'structure' in generated
@@ -249,16 +249,16 @@ def test_otfg_upload(with_otfg):

     # Initial upload
     ensure_otfg_family('C19V2')
-    assert OTFGGroup.objects.get(label='C19V2')
+    assert OTFGGroup.collection.get(label='C19V2')

     # Second call should not error
     ensure_otfg_family('C19V2')
-    assert OTFGGroup.objects.get(label='C19V2')
+    assert OTFGGroup.collection.get(label='C19V2')

     # Second call with forced update
     ensure_otfg_family('C19V2', force_update=True)
-    group = OTFGGroup.objects.get(label='C19V2')
+    group = OTFGGroup.collection.get(label='C19V2')
     found = False
     for node in group.nodes:
         if node.element == 'La':
diff --git a/tests/workflows/relax/test_implementations.py b/tests/workflows/relax/test_implementations.py
index 683c4f40..ac15de6c 100644
--- a/tests/workflows/relax/test_implementations.py
+++ b/tests/workflows/relax/test_implementations.py
@@ -23,7 +23,7 @@ def test_spec(workchain):
     required_ports = {
         'structure': {
-            'valid_type': plugins.DataFactory('structure')
+            'valid_type': plugins.DataFactory('core.structure')
         },
         'protocol': {
             'valid_type': str
diff --git a/tests/workflows/relax/test_quantum_espresso.py b/tests/workflows/relax/test_quantum_espresso.py
index 0ec8f319..7eb20a0c 100644
--- a/tests/workflows/relax/test_quantum_espresso.py
+++ b/tests/workflows/relax/test_quantum_espresso.py
@@ -101,11 +101,11 @@ def test_relax_type(generate_code, generate_structure):
     builder = generator.get_builder(structure=structure, engines=engines, relax_type=RelaxType.NONE)
     assert builder['base']['pw']['parameters']['CONTROL']['calculation'] == 'scf'
-    assert 'CELL' not in builder['base']['pw']['parameters'].attributes
+    assert 'CELL' not in builder['base']['pw']['parameters'].base.attributes.all

     builder = generator.get_builder(structure=structure, engines=engines, relax_type=RelaxType.POSITIONS)
     assert builder['base']['pw']['parameters']['CONTROL']['calculation'] == 'relax'
-    assert 'CELL' not in builder['base']['pw']['parameters'].attributes
+    assert 'CELL' not in builder['base']['pw']['parameters'].base.attributes.all

     builder = generator.get_builder(structure=structure, engines=engines, relax_type=RelaxType.CELL)
     assert builder['base']['pw']['parameters']['CONTROL']['calculation'] == 'vc-relax'
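Note on the API migration exercised by this change set: the edits above consistently replace the AiiDA 1.x idioms with their 2.x counterparts, namely the 'core.' prefix on entry points passed to DataFactory, link traversal through the Node.base.links namespace, and group lookups via the .collection classproperty instead of .objects. The snippet below is a minimal sketch of the resulting usage pattern, written against the public AiiDA 2.x API only; the EOS_PK value is a placeholder and not taken from this diff, and the script assumes a configured AiiDA 2.x profile with a completed EquationOfStateWorkChain in the database.

# Minimal sketch of the AiiDA 2.x API adopted by this migration (placeholder PK).
from aiida import load_profile, orm, plugins
from aiida.common.links import LinkType

load_profile()  # load the default AiiDA profile

EOS_PK = 1234  # placeholder identifier of a completed EquationOfStateWorkChain
node = orm.load_node(EOS_PK)

# Link traversal now lives under the ``base.links`` namespace.
outputs = node.base.links.get_outgoing(link_type=LinkType.RETURN).nested()
volumes = [structure.get_cell_volume() for structure in outputs['structures'].values()]
energies = [energy.value for energy in outputs['total_energies'].values()]

# Core data entry points carry the ``core.`` prefix in AiiDA 2.x.
StructureData = plugins.DataFactory('core.structure')

# Group collections are accessed through ``collection`` rather than ``objects``;
# this raises ``NotExistent`` if no group with that label exists.
# group = orm.Group.collection.get(label='some-group-label')

for volume, energy in zip(volumes, energies):
    print(f'{volume:10.4f} {energy:12.6f}')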