
Commit

Merge branch 'test_system' of github.com:juraskov/mlp-train into test_system
juraskov committed Nov 25, 2024
2 parents 6234495 + 68ffe24 commit 6a232e6
Showing 5 changed files with 262 additions and 18 deletions.
9 changes: 4 additions & 5 deletions environment_mace.yml
@@ -21,12 +21,11 @@ dependencies:
   - scipy
   - xtb
   - scikit-learn
-  - openmm
-  - openmm-torch
+  - openmm=8.1.2
+  - openmm-torch=1.4
   - nnpops
-  - openmm-ml
+  - openmm-ml=1.2
   - git
-  - openmm-ml
   - pip:
     - ase@git+https://gitlab.com/ase/ase.git@f2615a6e9a # For PLUMED
-    - mace-torch
+    - mace-torch==0.3.6
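The pins above make the MACE environment reproducible. A quick post-install sanity check can confirm that the environment actually resolved to the pinned versions; this is a minimal sketch, assuming the installed Python distribution names match the package names in the file (conda and PyPI names do not always agree):

```
# Sketch: verify the pins from environment_mace.yml in the active environment
from importlib.metadata import PackageNotFoundError, version

# Pins copied from the diff above; the distribution names are assumptions
pins = {'openmm': '8.1.2', 'openmm-torch': '1.4', 'mace-torch': '0.3.6'}

for name, expected in pins.items():
    try:
        found = version(name)
        status = 'OK' if found.startswith(expected) else f'MISMATCH: {found}'
    except PackageNotFoundError:
        status = 'not installed (or a different distribution name)'
    print(f'{name} (expected {expected}): {status}')
```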
25 changes: 16 additions & 9 deletions mlptrain/config.py
@@ -2,16 +2,23 @@


 class _ConfigClass:
-    """mlptrain configuration"""
+    """
+    MLP training configurations
+
+    This class contains default parameters for electronic structure computations and the training of available MLPs.
+    The default settings for the electronic structure keywords are None, to avoid accidentally running the wrong level of theory.
+    The desired level can be specified by, e.g.
+
+    ```
+    from mlptrain.config import Config
+    Config.orca_keywords = ['PBE', 'def2-SVP', 'EnGrad']
+    Config.gaussian_keywords = ['PBEPBE', 'Def2SVP', 'Force(NoStep)', 'integral=ultrafinegrid']
+    ```
+    """

     n_cores = 4
-    _orca_keywords = ['PBE', 'def2-SVP', 'EnGrad']
-    _gaussian_keywords = [
-        'PBEPBE',
-        'Def2SVP',
-        'Force(NoStep)',
-        'integral=ultrafinegrid',
-    ]
+    _orca_keywords = None
+    _gaussian_keywords = None

     # Default parameters for a GAP potential
     gap_default_params = {
@@ -87,7 +94,7 @@ def gaussian_keywords(self):

     @gaussian_keywords.setter
     def gaussian_keywords(self, value):
-        """ORCA keywords must be gradient"""
+        """Gaussian keywords must be gradient"""
         self._gaussian_keywords = value


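With the defaults now None, the level of theory has to be chosen explicitly before any reference data are computed; forgetting to do so raises a ValueError rather than silently running PBE. A minimal sketch of the intended workflow, using the same keyword lists that were removed from the old defaults:

```
from mlptrain.config import Config

# Explicitly select the level of theory for the reference calculations
Config.orca_keywords = ['PBE', 'def2-SVP', 'EnGrad']
Config.gaussian_keywords = [
    'PBEPBE',
    'Def2SVP',
    'Force(NoStep)',
    'integral=ultrafinegrid',
]
```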
4 changes: 2 additions & 2 deletions mlptrain/configurations/calculate.py
@@ -87,7 +87,7 @@ def _method_and_keywords(
 def _orca_keywords() -> 'autode.wrappers.keywords.Keywords':
     """Keywords e.g. functional and basis set to use for an ORCA calculation"""

-    if Config.orca_keywords is None:
+    if len(Config.orca_keywords) == 0:
         raise ValueError(
             'For ORCA training GTConfig.orca_keywords must be'
             ' set. e.g.\nmlt.Config.orca_keywords '
@@ -101,7 +101,7 @@ def _gaussian_keywords() -> 'autode.wrappers.keywords.Keywords':
     """Keywords e.g. functional and basis set to use for a Gaussian
     calculation, either Gaussian09 or Gaussian16"""

-    if Config.gaussian_keywords is None:
+    if len(Config.gaussian_keywords) == 0:
         raise ValueError(
             'To train with Gaussian QM calculations '
             'mlt.Config.gaussian_keywords must be set.'
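A note on the changed guards: with the new None defaults, testing `len(...)` rather than `is None` only makes sense if `Config.orca_keywords` never returns the raw None. A plausible reading, though an assumption since the property bodies are not part of this diff, is that the getter wraps the stored value in an autode keywords object, so an unset value surfaces as an empty keyword list. A self-contained sketch of that pattern:

```
# Hypothetical sketch, not code from mlp-train: a getter that normalises an
# unset (None) raw value into an empty keyword list
class _Config:
    _orca_keywords = None  # the new default introduced by this commit

    @property
    def orca_keywords(self) -> list:
        # None becomes an empty list, mimicking a keywords wrapper
        return list(self._orca_keywords or [])


config = _Config()
assert config.orca_keywords is not None  # an `is None` guard can never fire
assert len(config.orca_keywords) == 0    # the emptiness guard catches unset keywords
```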
31 changes: 29 additions & 2 deletions mlptrain/training/active.py
@@ -14,6 +14,7 @@
 from mlptrain.training.selection import SelectionMethod, AbsDiffE
 from mlptrain.configurations import ConfigurationSet
 from mlptrain.log import logger
+from mlptrain.box import Box


def train(
@@ -38,6 +39,8 @@ def train(
     constraints: Optional[List] = None,
     bias: Optional = None,
     md_program: str = 'ASE',
+    pbc: bool = False,
+    box_size: Optional[list] = None,
 ) -> None:
     """
     Train a system using active learning, by propagating dynamics using ML
@@ -130,6 +133,13 @@ def train(
             dynamics

         md_program: (str) 'ASE' or 'OpenMM'
+
+        pbc: (bool) If True, MLP-MD is propagated with periodic boundary
+            conditions; the solvent should therefore be placed in a box,
+            not a sphere. The training data are still treated as clusters
+            in the electronic structure computations.
+
+        box_size: (List | None) Size of the box in which MLP-MD is propagated.
     """
     if md_program.lower() == 'openmm':
         if not isinstance(mlp, mlptrain.potentials.MACE):
@@ -145,6 +155,9 @@

     _check_bias(bias=bias, temp=temp, inherit_metad_bias=inherit_metad_bias)

+    if pbc and box_size is None:
+        raise ValueError('For PBC in MD, the box_size cannot be None')
+
     if restart_iter is not None:
         _initialise_restart(
             mlp=mlp,
@@ -179,6 +192,10 @@ def train(
     for iteration in range(max_active_iters):
         if restart_iter is not None and iteration <= restart_iter:
             continue
+        if isinstance(bias, PlumedBias) and iteration > bias_start_iter:
+            extra_time = 0
+        else:
+            extra_time = mlp.training_data.t_min(-n_configs_iter)

         previous_n_train = mlp.n_train

@@ -203,13 +220,15 @@ def train(
             bbond_energy=bbond_energy,
             fbond_energy=fbond_energy,
             init_temp=init_active_temp,
-            extra_time=mlp.training_data.t_min(-n_configs_iter),
+            extra_time=extra_time,
             constraints=constraints,
             bias=deepcopy(bias),
             inherit_metad_bias=inherit_metad_bias,
             bias_start_iter=bias_start_iter,
             iteration=iteration,
             md_program=md_program,
+            pbc=pbc,
+            box_size=box_size,
         )

         # Active learning finds no configurations
@@ -399,6 +418,9 @@ def _gen_active_config(
         else kwargs.pop('init_active_temp')
     )

+    pbc = False if 'pbc' not in kwargs else kwargs.pop('pbc')
+    box_size = None if 'box_size' not in kwargs else kwargs.pop('box_size')
+
    if extra_time > 0:
        logger.info(f'Running an extra {extra_time:.1f} fs of MD')

@@ -410,6 +432,8 @@
     ):
         kwargs = _modify_kwargs_for_metad_bias_inheritance(kwargs)

+    if pbc:
+        config.box = Box(box_size)
     if kwargs['md_program'].lower() == 'openmm':
         traj = run_mlp_md_openmm(
             config,
@@ -435,6 +459,8 @@

     traj.t0 = curr_time  # Increment the initial time (t0)

+    for frame in traj:
+        frame.box = Box([100, 100, 100])
     # Evaluate the selector on the final frame
     selector(traj.final_frame, mlp, method_name=method_name, n_cores=n_cores)

@@ -584,6 +610,7 @@ def _gen_and_set_init_training_configs(
             config = mlp.system.random_configuration(
                 min_dist=dist, with_intra=True
             )
+            config.box = Box([100, 100, 100])
             init_configs.append(config)

         except RuntimeError:
@@ -592,7 +619,7 @@
     logger.info(f'Added {num} configurations with min dist = {dist:.3f} Å')
     init_configs.single_point(method_name)
     mlp.training_data += init_configs
-    return None
+    return init_configs


def _initialise_restart(
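Taken together, the active.py changes thread pbc and box_size from train() down to _gen_active_config: the MD configuration gets a periodic box, while frames selected for training are placed in a large 100 Å box and treated as clusters for the reference calculations. A usage sketch, assuming the public al_train wrapper forwards these keyword arguments to train() and using a hypothetical water.xyz input:

```
import mlptrain as mlt

mlt.Config.n_cores = 8
mlt.Config.orca_keywords = ['PBE', 'def2-SVP', 'EnGrad']

# Hypothetical solvated system; for periodic MLP-MD the solvent should be
# built in a box, not a sphere (see the new train() docstring)
system = mlt.System(mlt.Molecule('water.xyz'), box=None)
mace = mlt.potentials.MACE('water', system=system)

mace.al_train(
    method_name='orca',
    temp=300,
    md_program='OpenMM',    # the OpenMM path requires a MACE potential
    pbc=True,               # propagate MLP-MD with periodic boundaries
    box_size=[20, 20, 20],  # MD box edge lengths (Å assumed)
)
```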
(Diff of the fifth changed file, containing the remaining 211 additions, did not load and is not shown.)
