Skip to content

Commit

Permalink
Fix release CI (#314)
Browse files — browse the repository at this point in the history
* Add OpenBabel installation step to pytest workflow

* add pmg to dev deps

* Add workflow_dispatch and workflow_call trigger to test.yml

* DRY: reuse test.yml workflow in release.yml and use modern `python -m build` instead of `python setup.py sdist bdist_wheel`

* bump conda-incubator/setup-miniconda to v3

* fix doc strings missing arguments

* consistently use f-strings in filepath construction in QCJob

* properly deprecate ScanMetalHandler using monty.dev.deprecated decorator

* doc str white space normalization

* fix test test_check_with_non_kspacing_wf, failing due to added class decorator
  • Loading branch information
janosh authored Feb 15, 2024
1 parent 5148246 commit 99227c2
Show file tree
Hide file tree
Showing 21 changed files with 119 additions and 141 deletions.
43 changes: 10 additions & 33 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,44 +6,21 @@ on:
workflow_dispatch:

jobs:
build:
strategy:
max-parallel: 1
matrix:
os: [ubuntu-latest]
python-version: ["3.11"]

runs-on: ${{ matrix.os }}
test:
# run test.yml first to ensure that the test suite is passing
uses: ./.github/workflows/test.yml

build:
needs: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- uses: conda-incubator/setup-miniconda@v2
with:
python-version: ${{ matrix.python-version }}

- name: Install OpenBabel
shell: bash -l {0}
run: conda install -c conda-forge openbabel

- name: Install dependencies
shell: bash -l {0}
run: |
pip install pytest pymatgen
pip install -e .
- name: pytest
shell: bash -l {0}
env:
PMG_MAPI_KEY: ${{ secrets.PMG_MAPI_KEY }}
MPLBACKEND: "Agg"
run: |
pytest tests --color=yes
- name: release
shell: bash -l {0}
run: |
pip install build twine
python -m build
twine upload --skip-existing dist/*.tar.gz
env:
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
run: |
pip install setuptools wheel twine
python setup.py sdist bdist_wheel
twine upload --skip-existing dist/*.tar.gz
16 changes: 12 additions & 4 deletions .github/workflows/pytest.yml → .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ on:
pull_request:
branches: [master]
paths-ignore: [docs/**]
workflow_dispatch:
# make this workflow reusable by release.yml
workflow_call:

jobs:
build:
Expand All @@ -21,15 +24,20 @@ jobs:
steps:
- uses: actions/checkout@v4

- uses: conda-incubator/setup-miniconda@v3
with:
python-version: ${{ matrix.python-version }}

- name: Install OpenBabel
shell: bash -l {0}
run: conda install -c conda-forge openbabel

- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

- name: Install dependencies
run: |
pip install --upgrade pip
pip install --upgrade pymatgen
pip install -e '.[dev]'
run: pip install -e '.[dev]'

- name: pytest
env:
Expand Down
4 changes: 2 additions & 2 deletions custodian/ansible/actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ def get_nested_dict(input_dict, key):


class DictActions:
"""
Class to implement the supported mongo-like modifications on a dict.
"""Class to implement the supported mongo-like modifications on a dict.
Supported keywords include the following Mongo-based keywords, with the
usual meanings (refer to Mongo documentation for information):
Expand Down
3 changes: 1 addition & 2 deletions custodian/ansible/interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,7 @@ class Modder:
"""

def __init__(self, actions=None, strict=True):
"""
Initializes a Modder from a list of supported actions.
"""Initialize a Modder from a list of supported actions.
Args:
actions ([Action]): A sequence of supported actions. See
Expand Down
42 changes: 21 additions & 21 deletions custodian/cp2k/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,7 @@ class StdErrHandler(ErrorHandler):
error_msgs = {"seg_fault": ["SIGSEGV"], "out_of_memory": ["insufficient virtual memory"], "abort": ["SIGABRT"]}

def __init__(self, std_err="std_err.txt"):
"""
Initializes the handler with the output file to check.
"""Initialize the handler with the output file to check.
Args:
std_err (str): This is the file where the stderr for cp2k
Expand Down Expand Up @@ -135,8 +134,7 @@ class UnconvergedScfErrorHandler(ErrorHandler):
is_monitor = True

def __init__(self, input_file="cp2k.inp", output_file="cp2k.out"):
"""
Initializes the error handler from a set of input and output files.
"""Initialize the error handler from a set of input and output files.
Args:
input_file (str): Name of the CP2K input file.
Expand Down Expand Up @@ -404,11 +402,11 @@ class DivergingScfErrorHandler(ErrorHandler):
is_monitor = True

def __init__(self, output_file="cp2k.out", input_file="cp2k.inp"):
"""
Initializes the error handler from an output files.
"""Initializes the error handler from an output files.
Args:
output_file (str): Name of the CP2K output file.
input_file (str): Name of the CP2K input file.
"""
self.output_file = output_file
self.input_file = input_file
Expand Down Expand Up @@ -468,8 +466,7 @@ class FrozenJobErrorHandler(ErrorHandler):
is_monitor = True

def __init__(self, input_file="cp2k.inp", output_file="cp2k.out", timeout=3600):
"""
Initializes the handler with the output file to check.
"""Initialize the handler with the output file to check.
Args:
input_file (str): Name of the input file to modify if needed
Expand Down Expand Up @@ -592,11 +589,11 @@ def correct(self):


class AbortHandler(ErrorHandler):
"""
These are errors that cp2k recognizes internally, and causes a kill-signal,
as opposed to things like slow scf convergence, which is an unwanted feature of
optimization rather than an error per se. Currently this error handler recognizes
the following:
"""Handles errors that cp2k recognizes internally.
These internal errors cause a kill-signal, as opposed to things like slow scf
convergence, which is an unwanted feature of optimization rather than an error per se.
Currently this error handler recognizes the following:
(1) Cholesky decomposition error in preconditioner. If this is found, the
handler will try switching between Full_all/Full_single_inverse
Expand Down Expand Up @@ -745,14 +742,14 @@ def correct(self):


class NumericalPrecisionHandler(ErrorHandler):
"""
CP2K offers lots of functionality for decreasing numerical
precision in order to speed-up calculations. This can, unfortunately,
lead to convergence cycles getting 'stuck'. While it can be hard to
separate numerical issues from things like optimizer choice, slow-to-converge
systems, or divergence issues, this handler specifically detects the problem of
convergence getting stuck, where the same convergence value is returned many times
in a row. (Numerical precision can also be the cause of oscillating convergence.
"""This handler detects convergence cycles getting stuck due to numerical imprecision.
CP2K offers lots of functionality for decreasing numerical precision in order to
speed-up calculations. This can unfortunately lead to convergence cycles getting 'stuck'.
While it can be hard to separate numerical issues from things like optimizer choice,
slow-to-converge systems, or divergence issues, this handler specifically detects the
problem of convergence getting stuck, where the same convergence value is returned many
times in a row. (Numerical precision can also be the cause of oscillating convergence.
This is a little harder to assess, as it can also just look like slow-convergence.)
Currently, we have identified the following causes of this problem:
Expand Down Expand Up @@ -810,6 +807,9 @@ def __init__(
This will be caught and corrected, but it won't catch instances where the
last n-1 convergence values are the same for each outer scf loop, but it gets
reset by the preconditioner.
pgf_orb_strict (float): TODO @janosh someone who knows this code, please add a description
eps_default_strict (float): TODO @janosh likewise
eps_gvg_strict (float): TODO @janosh likewise
"""
self.input_file = input_file
self.output_file = output_file
Expand Down
5 changes: 2 additions & 3 deletions custodian/cp2k/interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,7 @@ class Cp2kModder(Modder):
"""

def __init__(self, filename="cp2k.inp", actions=None, strict=True, ci=None):
"""
Initializes a Modder for Cp2kInput sets.
"""Initialize a Modder for Cp2kInput sets.
Args:
filename (str): name of cp2k input file to modify. This file will be overwritten
Expand All @@ -47,7 +46,7 @@ def apply_actions(self, actions):
files.
Args:
actions [dict]: A list of actions of the form {'file': filename,
actions (dict): A list of actions of the form {'file': filename,
'action': moddermodification} or {'dict': cp2k_key,
'action': moddermodification}.
"""
Expand Down
5 changes: 3 additions & 2 deletions custodian/cp2k/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,12 @@ def restart(actions, output_file, input_file, no_actions_needed=False):
no actions are present, then non are added by this function
output_file (str): the cp2k output file name.
input_file (str): the cp2k input file name.
no_actions_needed (bool): if no actions are needed, then this should be set to True.
"""
if actions or no_actions_needed:
o = Cp2kOutput(output_file)
out = Cp2kOutput(output_file)
ci = Cp2kInput.from_file(input_file)
restart_file = o.filenames.get("restart")
restart_file = out.filenames.get("restart")
restart_file = restart_file[-1] if restart_file else None
wfn_restart = ci["force_eval"]["dft"].get("wfn_restart_file_name") if ci.check("force_eval/dft") else None

Expand Down
14 changes: 7 additions & 7 deletions custodian/custodian.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,9 @@


class Custodian:
"""
The Custodian class is the manager for a list of jobs given a list of
error handlers. The way it works is as follows:
"""Custodian class is the manager for a list of jobs given a list of error handlers.
The way it works is as follows:
1. Let's say you have defined a list of jobs as [job1, job2, job3, ...] and
you have defined a list of possible error handlers as [err1, err2, ...]
Expand Down Expand Up @@ -124,8 +124,7 @@ def __init__(
terminate_on_nonzero_returncode=True,
**kwargs,
):
"""
Initializes a Custodian from a list of jobs and error handlers.
"""Initialize a Custodian from a list of jobs and error handlers.
Args:
handlers ([ErrorHandler]): Error handlers. In order of priority of
Expand Down Expand Up @@ -175,6 +174,8 @@ def __init__(
running job. If None, the default is to call Popen.terminate.
terminate_on_nonzero_returncode (bool): If True, a non-zero return
code on any Job will result in a termination. Defaults to True.
**kwargs: Any other kwargs are ignored. This is to allow for easy
subclassing and instantiation from a dict.
"""
self.max_errors = max_errors
self.max_errors_per_job = max_errors_per_job or max_errors
Expand Down Expand Up @@ -842,8 +843,7 @@ class CustodianError(RuntimeError):
"""Exception class for Custodian errors."""

def __init__(self, message, raises=False):
"""
Initializes the error with a message.
"""Initialize the error with a message.
Args:
message (str): Message passed to Exception
Expand Down
3 changes: 1 addition & 2 deletions custodian/feff/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,7 @@ class UnconvergedErrorHandler(ErrorHandler):
is_monitor = False

def __init__(self, output_filename="log1.dat"):
"""
Initializes the handler with the output file to check.
"""Initialize the handler with the output file to check.
Args:
output_filename (str): Filename for the log1.dat file. log1.dat file
Expand Down
2 changes: 1 addition & 1 deletion custodian/feff/interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def apply_actions(self, actions):
files.
Args:
actions [dict]: A list of actions of the form {'file': filename,
actions (dict): A list of actions of the form {'file': filename,
'action': moddermodification} or {'dict': feffinput_key,
'action': moddermodification}
"""
Expand Down
3 changes: 1 addition & 2 deletions custodian/nwchem/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@ class NwchemErrorHandler(ErrorHandler):
"""

def __init__(self, output_filename="mol.nwout"):
"""
Initializes with an output file name.
"""Initialize with an output file name.
Args:
output_filename (str): This is the file where the stdout for nwchem
Expand Down
3 changes: 1 addition & 2 deletions custodian/nwchem/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,7 @@ def __init__(
backup=True,
settings_override=None,
):
"""
Initializes a basic NwChem job.
"""Initialize a basic NwChem job.
Args:
nwchem_cmd ([str]): Command to run Nwchem as a list of args. For
Expand Down
5 changes: 2 additions & 3 deletions custodian/qchem/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,7 @@ def __init__(
scf_max_cycles=100,
geom_max_cycles=200,
):
"""
Initializes the error handler from a set of input and output files.
"""Initialize the error handler from a set of input and output files.
Args:
input_file (str): Name of the QChem input file.
Expand Down Expand Up @@ -333,7 +332,7 @@ def correct(self):
actions.append({"mem_static": "2000"})

elif "mem_total_too_small" in self.errors:
print("Run on a node with more memory! Current mem_total = " + str(self.outdata["mem_total"]))
print(f"Run on a node with more memory! Current mem_total = {self.outdata['mem_total']}")
return {"errors": self.errors, "actions": None}

elif "basis_not_supported" in self.errors:
Expand Down
Loading

0 comments on commit 99227c2

Please sign in to comment.