diff --git a/.codacy.yml b/.codacy.yml index 37c328e7..9e790d7d 100644 --- a/.codacy.yml +++ b/.codacy.yml @@ -3,13 +3,16 @@ engines: flake8: enabled: true exclude_paths: + - "pyproject.toml" - "setup.py" - "versioneer.py" + - "genconf.py" - "ams/pypower/**" - "ams/_version.py" - ".github/**" - "README.md" - - "CODE_OF_CONDUCT.md" + - "cases/README.md" + - "CODE_OF_CONDUCT.md" - "tests/**" - "docs/**" - "examples/**" \ No newline at end of file diff --git a/.codecov.yml b/.codecov.yml index 266fe31a..ed292896 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -15,7 +15,9 @@ cli: codecov: token: 5fc2d41f-bcba-455d-a843-f85beb76d28f ignore: + - "pyproject.toml" - "setup.py" + - "genconf.py" - "versioneer.py" - "ams/pypower/**" - "ams/_version.py" diff --git a/.flake8 b/.flake8 index 48c00266..893e1277 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,3 @@ [flake8] -exclude = .git,__pycache__,build,dist,demos,cases,dev,ams/solver/pypower/,cards,versioneer.py,ams/_version.py,docs/source/conf.py,docs/source/_build,benchmarks,andes/pycode,scripts,.history,ams/pypower, +exclude = .git,__pycache__,build,dist,demos,cases,dev,ams/solver/pypower/,cards,versioneer.py,ams/_version.py,docs/source/conf.py,docs/source/_build,benchmarks,andes/pycode,scripts,.history,ams/pypower,icebar/* max-line-length=115 \ No newline at end of file diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml index 2a778bb5..93b82e0b 100644 --- a/.github/workflows/codecov.yml +++ b/.github/workflows/codecov.yml @@ -24,6 +24,7 @@ jobs: run: | mamba install -y nbmake pytest-xdist line_profiler # add'l packages for notebook tests. mamba install --file requirements.txt --file requirements-extra.txt + python -m pip install pyscipopt python -m pip install -e . 
- shell: bash -el {0} name: Test with pytest and collect coverage diff --git a/.github/workflows/pythonapp.yml b/.github/workflows/pythonapp.yml index 59b6d71e..11cb14dd 100644 --- a/.github/workflows/pythonapp.yml +++ b/.github/workflows/pythonapp.yml @@ -3,12 +3,49 @@ name: Python application on: [push, pull_request] jobs: + pretest: + name: AMS Pretest + + runs-on: ubuntu-latest + + if: ${{ github.ref != 'refs/heads/misc' }} + + steps: + - uses: actions/checkout@v4 + - uses: conda-incubator/setup-miniconda@v3 + with: + python-version: 3.11 + mamba-version: "*" + miniforge-version: "latest" + channels: conda-forge,defaults + channel-priority: true + activate-environment: anaconda-client-env + - shell: bash -el {0} + name: Install dependencies + run: | + mamba install -y nbmake pytest-xdist line_profiler # add'l packages for notebook tests. + mamba install --file requirements.txt --file requirements-extra.txt + python -m pip install pyscipopt + python -m pip install -e . + - shell: bash -el {0} + name: Run pip check + run: | + pip check || true # Allow pip check to fail without failing the job + - shell: bash -el {0} + name: Test with pytest + run: | + pytest + - shell: bash -el {0} + name: Test notebooks. + run: | + pytest --nbmake examples --ignore=examples/verification + build: name: AMS Tests strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.9, 3.10, 3.11, 3.12] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] runs-on: ubuntu-latest @@ -29,13 +66,16 @@ jobs: run: | mamba install -y nbmake pytest-xdist line_profiler # add'l packages for notebook tests. mamba install --file requirements.txt --file requirements-extra.txt + python -m pip install pyscipopt python -m pip install -e . 
- - name: Lint with flake8 for pull requests - if: github.event_name == 'pull_request' + - shell: bash -el {0} + name: Run pip check + run: | + pip check || true # Allow pip check to fail without failing the job + - shell: bash -el {0} + name: Test with pytest run: | - pip install flake8 # specify flake8 to avoid unknown error - # stop the build if there are Python syntax errors or undefined names - flake8 . + pytest - shell: bash -el {0} name: Test notebooks. run: | diff --git a/.readthedocs.yml b/.readthedocs.yml index c732d0ac..2fcec7aa 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -18,6 +18,4 @@ python: - method: pip path: . extra_requirements: - - doc - - method: setuptools - path: . \ No newline at end of file + - doc \ No newline at end of file diff --git a/CITATION.bib b/CITATION.bib new file mode 100644 index 00000000..f4b3a333 --- /dev/null +++ b/CITATION.bib @@ -0,0 +1,10 @@ +@article{andes_2021, + author = {Cui, Hantao and Li, Fangxing and Tomsovic, Kevin}, + journal = {IEEE Transactions on Power Systems}, + title = {Hybrid Symbolic-Numeric Framework for Power System Modeling and Analysis}, + year = {2021}, + volume = {36}, + number = {2}, + pages = {1373-1384}, + doi = {10.1109/TPWRS.2020.3017019} +} \ No newline at end of file diff --git a/LICENSE b/LICENSE index 6f4c393f..b9d6018a 100644 --- a/LICENSE +++ b/LICENSE @@ -670,7 +670,7 @@ Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: - AMS Copyright (C) 2023 Jinning Wang + AMS Copyright (C) 2023 - 2024 Jinning Wang This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. 
diff --git a/README.md b/README.md index 1cdf8380..29e6245d 100644 --- a/README.md +++ b/README.md @@ -82,6 +82,19 @@ Install from GitHub source: pip install git+https://github.com/CURENT/ams.git ``` +# Example Usage + +Using AMS to run a Real-Time Economic Dispatch (RTED) simulation: + +```python +import ams + +ss = ams.load(ams.get_case('ieee14_uced.xlsx')) +ss.RTED.run() + +print(ss.RTED.pg.v) +``` + # Sponsors and Contributors AMS is the scheduling simulation engine for the CURENT Largescale Testbed (LTB). More information about CURENT LTB can be found at the [LTB Repository][LTB Repository]. diff --git a/ams/__init__.py b/ams/__init__.py index 1b86b2ba..ae078bea 100644 --- a/ams/__init__.py +++ b/ams/__init__.py @@ -1,20 +1,13 @@ from . import _version __version__ = _version.get_versions()['version'] -from ams import io # NOQA -from ams import utils # NOQA -from ams import models # NOQA -from ams import system # NOQA -from ams import routines # NOQA -from ams import opt # NOQA -from ams import pypower # NOQA -from ams import report # NOQA -from ams import extension # NOQA +from ams import benchmarks # NOQA from ams.main import config_logger, load, run # NOQA -from ams.utils.paths import get_case # NOQA +from ams.system import System # NOQA +from ams.utils.paths import get_case, list_cases # NOQA from ams.shared import ppc2df # NOQA __author__ = 'Jining Wang' -__all__ = ['io', 'utils', 'models', 'system', 'extension'] +__all__ = ['System', 'get_case', 'list_cases'] diff --git a/ams/benchmarks.py b/ams/benchmarks.py new file mode 100644 index 00000000..b80bf6cd --- /dev/null +++ b/ams/benchmarks.py @@ -0,0 +1,302 @@ +""" +Benchmark functions. +""" + +import datetime +import sys +import importlib.metadata as importlib_metadata +import logging + +try: + import pandapower as pdp + PANDAPOWER_AVAILABLE = True +except ImportError: + PANDAPOWER_AVAILABLE = False + logging.warning("pandapower is not available. 
Some functionalities will be disabled.") + +from andes.utils.misc import elapsed + +import ams + +logger = logging.getLogger(__name__) + +_failed_time = -1 +_failed_obj = -1 + +cols_pre = ['ams_mats', 'ams_parse', 'ams_eval', 'ams_final', 'ams_postinit'] + + +def get_tool_versions(tools=None): + """ + Get the current time, Python version, and versions of specified tools. + + Parameters + ---------- + tools : list of str, optional + List of tool names to check versions. If None, a default list of tools is used. + + Returns + ------- + dict + A dictionary containing the tool names and their versions. + """ + if tools is None: + tools = ['ltbams', 'andes', 'cvxpy', + 'gurobipy', 'mosek', 'piqp', + 'pandapower', 'numba'] + + # Get current time and Python version + last_run_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + python_version = sys.version + + # Collect tool versions + tool_versions = {} + for tool in tools: + try: + version = importlib_metadata.version(tool) + tool_versions[tool] = version + except importlib_metadata.PackageNotFoundError: + logger.error(f"Package {tool} not found.") + tool_versions[tool] = "Not installed" + + # Print the results in a formatted way + logger.warning(f"Last run time: {last_run_time}") + logger.warning(f"Python: {python_version}\n") + + # Calculate the width of the columns + max_tool_length = max(len(tool) for tool in tool_versions) + max_version_length = max(len(version) for version in tool_versions.values()) + + # Print the header + logger.warning(f"{'Tool':<{max_tool_length}} {'Version':<{max_version_length}}") + logger.warning(f"{'-' * max_tool_length} {'-' * max_version_length}") + + # Print each tool and its version + for tool, version in tool_versions.items(): + logger.warning(f"{tool:<{max_tool_length}} {version:<{max_version_length}}") + + return tool_versions + + +def time_routine_solve(system, routine='DCOPF', **kwargs): + """ + Run the specified routine with the given solver and method. 
+ + Parameters + ---------- + system : ams.System + The system object containing the routine. + routine : str, optional + The name of the routine to run. Defaults to 'DCOPF'. + + Other Parameters + ---------------- + solver : str, optional + The solver to use. + ignore_dpp : bool, optional + Whether to ignore DPP. Defaults to True. + + Returns + ------- + tuple + A tuple containing the elapsed time (s) and the objective value ($). + """ + rtn = system.routines[routine] + solver = kwargs.get('solver', None) + try: + t, _ = elapsed() + rtn.run(**kwargs) + _, s0 = elapsed(t) + elapsed_time = float(s0.split(' ')[0]) + obj_value = rtn.obj.v + except Exception as e: + logger.error(f"Error running routine {routine} with solver {solver}: {e}") + elapsed_time = _failed_time + obj_value = _failed_obj + return elapsed_time, obj_value + + +def pre_solve(system, routine): + """ + Time the routine preparation process. + + Parameters + ---------- + system : ams.System + The system object containing the routine. + routine : str + The name of the routine to prepare + + Returns + ------- + dict + A dictionary containing the preparation times in seconds for each step: + 'mats', 'parse', 'evaluate', 'finalize', 'postinit'. 
+ """ + rtn = system.routines[routine] + + # Initialize AMS + # --- matrices build --- + t_mats, _ = elapsed() + system.mats.build(force=True) + _, s_mats = elapsed(t_mats) + + # --- code generation --- + t_parse, _ = elapsed() + rtn.om.parse(force=True) + _, s_parse = elapsed(t_parse) + + # --- code evaluation --- + t_evaluate, _ = elapsed() + rtn.om.evaluate(force=True) + _, s_evaluate = elapsed(t_evaluate) + + # --- problem finalization --- + t_finalize, _ = elapsed() + rtn.om.finalize(force=True) + _, s_finalize = elapsed(t_finalize) + + # --- rest init process --- + t_postinit, _ = elapsed() + rtn.init() + _, s_postinit = elapsed(t_postinit) + + s_float = [float(s.split(' ')[0]) for s in [s_mats, s_parse, s_evaluate, s_finalize, s_postinit]] + + pre_time = dict(zip(cols_pre, s_float)) + return pre_time + + +def time_pdp_dcopf(ppn): + """ + Test the execution time of DCOPF using pandapower. + + Parameters + ---------- + ppn : pandapowerNet + The pandapower network object. + + Returns + ------- + tuple + A tuple containing the elapsed time (s) and the objective value ($). + """ + try: + t_pdp, _ = elapsed() + pdp.rundcopp(ppn) + _, s_pdp = elapsed(t_pdp) + elapsed_time = float(s_pdp.split(' ')[0]) + obj_value = ppn.res_cost + except Exception as e: + logger.error(f"Error running pandapower: {e}") + elapsed_time = _failed_time + obj_value = _failed_obj + return elapsed_time, obj_value + + +def time_routine(system, routine='DCOPF', solvers=['CLARABEL'], + **kwargs): + """ + Time the specified routine with the given solvers. + + Parameters + ---------- + system : ams.System + The system object containing the routine. + routine : str, optional + The name of the routine to run. Defaults to 'DCOPF'. + solvers : list of str, optional + List of solvers to use. Defaults to ['CLARABEL']. + + Other Parameters + ---------------- + ignore_dpp : bool, optional + Whether to ignore DPP. Defaults to True. 
+ + Returns + ------- + tuple + A tuple containing the preparation times and the solution times in + seconds for each solver. + """ + pre_time = pre_solve(system, routine) + sol = {f'{solver}': {'time': 0, 'obj': 0} for solver in solvers} + + for solver in solvers: + if solver != 'pandapower': + s, obj = time_routine_solve(system, routine, solver=solver, **kwargs) + sol[solver]['time'] = s + sol[solver]['obj'] = obj + elif solver == 'pandapower' and PANDAPOWER_AVAILABLE and routine == 'DCOPF': + ppc = ams.io.pypower.system2ppc(system) + ppn = pdp.converter.from_ppc(ppc, f_hz=system.config.freq) + s, obj = time_pdp_dcopf(ppn) + sol[solver]['time'] = s + sol[solver]['obj'] = obj + else: + sol[solver]['time'] = _failed_time + sol[solver]['obj'] = _failed_obj + + return pre_time, sol + + +def time_dcopf_with_lf(system, solvers=['CLARABEL'], load_factors=[1], ignore_dpp=False): + """ + Time the execution of DCOPF with varying load factors. + + Parameters + ---------- + system : ams.System + The system object containing the routine. + solvers : list of str, optional + List of solvers to use. Defaults to ['CLARABEL']. + load_factors : list of float, optional + List of load factors to apply. Defaults to None. + ignore_dpp : bool, optional + Whether to ignore DPP. + + Returns + ------- + tuple + A tuple containing the list of times and the list of objective values. 
+ """ + pre_time = pre_solve(system, 'DCOPF') + sol = {f'{solver}': {'time': 0, 'obj': 0} for solver in solvers} + + pd0 = system.PQ.p0.v.copy() + pq_idx = system.PQ.idx.v + + for solver in solvers: + if solver != 'pandapower': + obj_all = 0 + t_all, _ = elapsed() + for lf_k in load_factors: + system.PQ.set(src='p0', attr='v', idx=pq_idx, value=lf_k * pd0) + system.DCOPF.update(params=['pd']) + _, obj = time_routine_solve(system, 'DCOPF', + solver=solver, ignore_dpp=ignore_dpp) + obj_all += obj + _, s_all = elapsed(t_all) + system.PQ.set(src='p0', attr='v', idx=pq_idx, value=pd0) + s = float(s_all.split(' ')[0]) + sol[solver]['time'] = s + sol[solver]['obj'] = obj_all + elif solver == 'pandapower' and PANDAPOWER_AVAILABLE: + ppc = ams.io.pypower.system2ppc(system) + ppn = pdp.converter.from_ppc(ppc, f_hz=system.config.freq) + p_mw0 = ppn.load['p_mw'].copy() + t_all, _ = elapsed() + obj_all = 0 + for lf_k in load_factors: + ppn.load['p_mw'] = lf_k * p_mw0 + _, obj = time_pdp_dcopf(ppn) + obj_all += obj + _, s_all = elapsed(t_all) + s = float(s_all.split(' ')[0]) + sol[solver]['time'] = s + sol[solver]['obj'] = obj_all + else: + sol[solver]['time'] = _failed_time + sol[solver]['obj'] = _failed_obj + + return pre_time, sol diff --git a/ams/cases/5bus/pjm5bus_jumper.xlsx b/ams/cases/5bus/pjm5bus_jumper.xlsx new file mode 100644 index 00000000..ce398420 Binary files /dev/null and b/ams/cases/5bus/pjm5bus_jumper.xlsx differ diff --git a/ams/cases/5bus/pjm5bus_uced_esd1.xlsx b/ams/cases/5bus/pjm5bus_uced_esd1.xlsx index d2ec3f81..18b90430 100644 Binary files a/ams/cases/5bus/pjm5bus_uced_esd1.xlsx and b/ams/cases/5bus/pjm5bus_uced_esd1.xlsx differ diff --git a/ams/cases/README.md b/ams/cases/README.md new file mode 100644 index 00000000..8ad62666 --- /dev/null +++ b/ams/cases/README.md @@ -0,0 +1,85 @@ +# Cases + +This folder contains various power system case studies used for scheduling studies. Each subfolder holds a different power system model with multiple variants. 
+ +## 5bus + +PJM 5-bus system [5bus](#references) + +- `pjm5bus_demo.xlsx`: Demo case for the 5-bus system. +- `pjm5bus_jumper.xlsx`: Jumper case for the 5-bus system. +- `pjm5bus_uced.json`: UCED case in JSON format. +- `pjm5bus_uced.xlsx`: UCED case in Excel format. +- `pjm5bus_uced_esd1.xlsx`: UCED case with energy storage device. +- `pjm5bus_uced_ev.xlsx`: UCED case with electric vehicles. + +## ieee14 + +IEEE 14-bus system [pstca](#references) + +- `ieee14.json`: JSON format of the IEEE 14-bus system. +- `ieee14.raw`: Raw power flow data for the IEEE 14-bus system. +- `ieee14_uced.xlsx`: UCED case for the IEEE 14-bus system. + +## ieee39 + +IEEE 39-bus system [pstca](#references) + +- `ieee39.xlsx`: Base case for the IEEE 39-bus system. +- `ieee39_uced.xlsx`: UCED case for the IEEE 39-bus system. +- `ieee39_uced_esd1.xlsx`: UCED case with energy storage device. +- `ieee39_uced_pvd1.xlsx`: UCED case with photovoltaic device. +- `ieee39_uced_vis.xlsx`: Visualization case for the IEEE 39-bus system. + +## ieee123 + +IEEE 123-bus system [pstca](#references) + +- `ieee123.xlsx`: Base case for the IEEE 123-bus system. +- `ieee123_regcv1.xlsx`: Case with regulator control version 1. + +## matpower + +Cases from Matpower [matpower](#references) + +- `case5.m`: Matpower case for the 5-bus system. +- `case14.m`: Matpower case for the 14-bus system. +- `case39.m`: Matpower case for the 39-bus system. +- `case118.m`: Matpower case for the 118-bus system. +- `case300.m`: Matpower case for the 300-bus system. +- `case_ACTIVSg2000.m`: Matpower case for the ACTIVSg2000 system. +- `benchmark.json`: Benchmark results, NOT a power system case. + +## npcc + +Northeast Power Coordinating Council system [SciData](#references) + +- `npcc.m`: Matpower case for the NPCC system. +- `npcc_uced.xlsx`: UCED case for the NPCC system. + +## wecc + +Western Electricity Coordinating Council system [SciData](#references) + +- `wecc.m`: Matpower case for the WECC system. 
+- `wecc_uced.xlsx`: UCED case for the WECC system. + +## pglib + +Cases from the Power Grid Lib - Optimal Power Flow [pglib](#references) + +- `pglib_opf_case39_epri__api.m`: PGLib case for the 39-bus system. + +--- + +## References + +[5bus]: F. Li and R. Bo, "Small test systems for power system economic studies," IEEE PES General Meeting, 2010, pp. 1-4, doi: 10.1109/PES.2010.5589973. + +[pstca]: “Power Systems Test Case Archive - UWEE,” labs.ece.uw.edu. https://labs.ece.uw.edu/pstca/ + +[matpower]: R. D. Zimmerman, C. E. Murillo-Sanchez, and R. J. Thomas, "MATPOWER: Steady-State Operations, Planning and Analysis Tools for Power Systems Research and Education," Power Systems, IEEE Transactions on, vol. 26, no. 1, pp. 12-19, Feb. 2011. doi: 10.1109/TPWRS.2010.2051168 + +[SciData]: Q. Zhang and F. Li, “A Dataset for Electricity Market Studies on Western and Northeastern Power Grids in the United States,” Scientific Data, vol. 10, no. 1, p. 646, Sep. 2023, doi: 10.1038/s41597-023-02448-w. + +[pglib]: S. Babaeinejadsarookolaee et al., “The Power Grid Library for Benchmarking AC Optimal Power Flow Algorithms,” arXiv.org, 2019. https://arxiv.org/abs/1908.02788 \ No newline at end of file diff --git a/ams/cli.py b/ams/cli.py index 6b383f16..80721420 100644 --- a/ams/cli.py +++ b/ams/cli.py @@ -29,6 +29,12 @@ def create_parser(): ------- argparse.ArgumentParser Parser with all AMS options + + Notes + ----- + Revised from the ANDES project (https://github.com/CURENT/andes). 
+ Original author: Hantao Cui + License: GPL3 """ parser = argparse.ArgumentParser() diff --git a/ams/core/matprocessor.py b/ams/core/matprocessor.py index e4b27bcd..151dac3d 100644 --- a/ams/core/matprocessor.py +++ b/ams/core/matprocessor.py @@ -10,11 +10,11 @@ import numpy as np from andes.thirdparty.npfunc import safe_div -from andes.shared import pd, tqdm, tqdm_nb +from andes.shared import tqdm, tqdm_nb from andes.utils.misc import elapsed, is_notebook from ams.opt.omodel import Param -from ams.shared import sps +from ams.shared import pd, sps logger = logging.getLogger(__name__) @@ -186,19 +186,33 @@ def __init__(self, system): info='Line outage distribution factor', v=None, sparse=False, owner=self) - def build(self): + def build(self, force=False): """ Build the system matrices. It build connectivity matrices first: Cg, Cl, Csh, Cft, and CftT. Then build bus matrices: Bf, Bbus, Pfinj, and Pbusinj. + Parameters + ---------- + force : bool, optional + If True, force to rebuild the matrices. Default is False. + + Notes + ----- + Generator online status is NOT considered in its connectivity matrix. + The same applies for load, line, and shunt. + Returns ------- initialized : bool True if the matrices are built successfully. 
""" - t_mat, _ = elapsed() + if not force and self.initialized: + logger.debug("System matrices are already built.") + return self.initialized + t_mat, _ = elapsed() + logger.warning("Building system matrices") # --- connectivity matrices --- _ = self.build_cg() _ = self.build_cl() @@ -212,7 +226,7 @@ def build(self): _ = self.build_pbusinj() _, s_mat = elapsed(t_mat) - logger.debug(f"Built system matrices in {s_mat}.") + logger.debug(f" -> System matrices built in {s_mat}") self.initialized = True return self.initialized diff --git a/ams/core/param.py b/ams/core/param.py index d07eb094..50c0cc4c 100644 --- a/ams/core/param.py +++ b/ams/core/param.py @@ -139,7 +139,6 @@ def __init__(self, self.expand_dims = expand_dims self.no_parse = no_parse self.owner = None # instance of the owner model or group - self.rtn = None # instance of the owner routine self.is_ext = False # indicate if the value is set externally self._v = None # external value if v is not None: @@ -197,7 +196,12 @@ def shape(self): """ Return the shape of the parameter. """ - return np.shape(self.v) + if self.is_ext: + return np.shape(self._v) + elif self.no_parse: + return None + else: + return np.shape(self.v) @property def dtype(self): diff --git a/ams/core/service.py b/ams/core/service.py index 84d1ed26..d94ce5e8 100644 --- a/ams/core/service.py +++ b/ams/core/service.py @@ -178,7 +178,7 @@ def __init__(self, class LoadScale(ROperationService): """ - Return load. + Get zonal load by scale nodal load given the corresponding load scale factor. 
Parameters ---------- @@ -218,11 +218,12 @@ def __init__(self, def v(self): sys = self.rtn.system u_idx = self.u.get_idx() + ue = self.u.owner.get(src='u', attr='v', idx=u_idx) u_bus = self.u.owner.get(src='bus', attr='v', idx=u_idx) u_zone = sys.Bus.get(src='zone', attr='v', idx=u_bus) u_yloc = np.array(sys.Region.idx2uid(u_zone)) p0s = np.multiply(self.sd.v[:, u_yloc].transpose(), - self.u.v[:, np.newaxis]) + (ue * self.u.v)[:, np.newaxis]) if self.sparse: return spr.csr_matrix(p0s) return p0s diff --git a/ams/core/symprocessor.py b/ams/core/symprocessor.py index ba95b833..c1bc280e 100644 --- a/ams/core/symprocessor.py +++ b/ams/core/symprocessor.py @@ -9,6 +9,7 @@ import sympy as sp +from andes.utils.misc import elapsed from ams.core.matprocessor import MatProcessor logger = logging.getLogger(__name__) @@ -85,6 +86,8 @@ def __init__(self, parent): # mapping dict for evaluating expressions self.val_map = OrderedDict([ + (r'(== 0|<= 0)$', ''), # remove the comparison operator + (r'cp\.(Minimize|Maximize)', r'float'), # remove cp.Minimize/Maximize (r'\bcp.\b', 'np.'), ]) @@ -107,9 +110,12 @@ def generate_symbols(self, force_generate=False): """ Generate symbols for all variables. 
""" - if not force_generate and self.parent._syms: + logger.debug(f'Entering symbol generation for <{self.parent.class_name}>') + + if (not force_generate) and self.parent._syms: + logger.debug(' - Symbols already generated') return True - logger.debug(f'- Generating symbols for {self.parent.class_name}') + t, _ = elapsed() # process tex_names defined in routines # ----------------------------------------------------------- @@ -186,8 +192,10 @@ def generate_symbols(self, force_generate=False): self.inputs_dict['sys_mva'] = sp.symbols('sys_mva') self.parent._syms = True + _, s = elapsed(t) - return True + logger.debug(f' - Symbols generated in {s}') + return self.parent._syms def _check_expr_symbols(self, expr): """ diff --git a/ams/interop/andes.py b/ams/interface.py similarity index 93% rename from ams/interop/andes.py rename to ams/interface.py index aa6cb80a..fe93dd32 100644 --- a/ams/interop/andes.py +++ b/ams/interface.py @@ -1,49 +1,41 @@ """ -Interface with ANDES +Module for interfacing ANDES """ import os import logging from collections import OrderedDict, Counter -from andes.shared import pd, np from andes.utils.misc import elapsed from andes.system import System as andes_System +from ams.utils import create_entry from ams.io import input_formats -from ams.shared import nan +from ams.shared import nan, pd, np logger = logging.getLogger(__name__) # Models used in ANDES PFlow pflow_dict = OrderedDict([ - ('Bus', ['idx', 'u', 'name', - 'Vn', 'vmax', 'vmin', - 'v0', 'a0', 'xcoord', 'ycoord', - 'area', 'zone', 'owner']), - ('PQ', ['idx', 'u', 'name', - 'bus', 'Vn', 'p0', 'q0', - 'vmax', 'vmin', 'owner']), - ('PV', ['idx', 'u', 'name', 'Sn', - 'Vn', 'bus', 'busr', 'p0', 'q0', - 'pmax', 'pmin', 'qmax', 'qmin', - 'v0', 'vmax', 'vmin', 'ra', 'xs']), - ('Slack', ['idx', 'u', 'name', 'Sn', - 'Vn', 'bus', 'busr', 'p0', 'q0', - 'pmax', 'pmin', 'qmax', 'qmin', - 'v0', 'vmax', 'vmin', 'ra', 'xs', - 'a0']), - ('Shunt', ['idx', 'u', 'name', 'Sn', - 'Vn', 'bus', 'g', 'b', 
'fn']), - ('Line', ['idx', 'u', 'name', - 'bus1', 'bus2', 'Sn', - 'fn', 'Vn1', 'Vn2', - 'r', 'x', 'b', 'g', 'b1', 'g1', 'b2', 'g2', - 'trans', 'tap', 'phi', - 'rate_a', 'rate_b', 'rate_c', - 'owner', 'xcoord', 'ycoord']), - ('Area', ['idx', 'u', 'name']), + ('Bus', create_entry('Vn', 'vmax', 'vmin', 'v0', 'a0', + 'xcoord', 'ycoord', 'area', 'zone', + 'owner')), + ('PQ', create_entry('bus', 'Vn', 'p0', 'q0', 'vmax', + 'vmin', 'owner')), + ('PV', create_entry('Sn', 'Vn', 'bus', 'busr', 'p0', 'q0', + 'pmax', 'pmin', 'qmax', 'qmin', + 'v0', 'vmax', 'vmin', 'ra', 'xs')), + ('Slack', create_entry('Sn', 'Vn', 'bus', 'busr', 'p0', 'q0', + 'pmax', 'pmin', 'qmax', 'qmin', + 'v0', 'vmax', 'vmin', 'ra', 'xs', 'a0')), + ('Shunt', create_entry('Sn', 'Vn', 'bus', 'g', 'b', 'fn')), + ('Line', create_entry('bus1', 'bus2', 'Sn', 'fn', 'Vn1', 'Vn2', + 'r', 'x', 'b', 'g', 'b1', 'g1', 'b2', 'g2', + 'trans', 'tap', 'phi', 'rate_a', 'rate_b', + 'rate_c', 'owner', 'xcoord', 'ycoord')), + ('Area', create_entry()), + ('Jumper', create_entry('bus1', 'bus2')), ]) # dict for guessing dynamic models given its idx @@ -324,7 +316,7 @@ def parse_addfile(adsys, amsys, addfile): syg_idx += syg.idx.v syg_bus_idx = adsys.SynGen.get(src='bus', attr='v', idx=syg_idx) syg_bus_vn = adsys.Bus.get(src='Vn', idx=syg_bus_idx) - adsys.SynGen.set(src='Vn', attr='v', idx=syg_idx, value=syg_bus_vn) + adsys.SynGen.set(src='Vn', idx=syg_idx, attr='v', value=syg_bus_vn) # --- for debugging --- adsys.df_in = df_models @@ -424,8 +416,7 @@ def _send_tgr(self, sa, sp): if syg_mask.any(): logger.debug('Governor is not complete for SynGen.') # --- pref --- - sa.TurbineGov.set(value=syg_ams, idx=gov_idx, - src='pref0', attr='v') + sa.TurbineGov.set(value=syg_ams, idx=gov_idx, attr='v', src='pref0') # --- paux --- # TODO: sync paux, using paux0 @@ -440,8 +431,7 @@ def _send_tgr(self, sa, sp): dg_ams = sp.recent.get(src='pg', attr='v', idx=stg_dg_idx, allow_none=True, default=0) # --- pref --- - sa.DG.set(value=dg_ams, 
idx=dg_idx, - src='pref0', attr='v') + sa.DG.set(src='pref0', idx=dg_idx, attr='v', value=dg_ams) # TODO: paux, using Pext0, this one should be do in other place rather than here # 3) RenGen @@ -495,9 +485,9 @@ def _send_dgu(self, sa, sp): msg += ' Otherwise, unexpected results might occur.' raise ValueError(msg) # FIXME: below code seems to be unnecessary - sa.SynGen.set(src='u', attr='v', idx=syg_idx, value=stg_u_ams) - sa.DG.set(src='u', attr='v', idx=dg_idx, value=dg_u_ams) - sa.RenGen.set(src='u', attr='v', idx=rg_idx, value=rg_u_ams) + sa.SynGen.set(src='u', idx=syg_idx, attr='v', value=stg_u_ams) + sa.DG.set(src='u', idx=dg_idx, attr='v', value=dg_u_ams) + sa.RenGen.set(src='u', idx=rg_idx, attr='v', value=rg_u_ams) return True def _sync_check(self, amsys, adsys): @@ -577,7 +567,7 @@ def send(self, adsys=None, routine=None): stg_idx = sp.StaticGen.get_idx() bus_stg = sp.StaticGen.get(src='bus', attr='v', idx=stg_idx) vBus = rtn.get(src='vBus', attr='v', idx=bus_stg) - sa.StaticGen.set(value=vBus, idx=stg_idx, src='v0', attr='v') + sa.StaticGen.set(src='v0', idx=stg_idx, attr='v', value=vBus) logger.info(f'*Send <{vname_ams}> to StaticGen.v0') # 1. 
gen online status; in TDS running, setting u is invalid @@ -609,7 +599,7 @@ def send(self, adsys=None, routine=None): if syg_mask.any(): logger.debug('Governor is not complete for SynGen.') # --- pref --- - sa.TurbineGov.set(value=syg_ams, idx=gov_idx, src='pref0', attr='v') + sa.TurbineGov.set(src='pref0', idx=gov_idx, attr='v', value=syg_ams) # --- DG: DG.pref0 --- dg_idx = sp.dyn.link['dg_idx'].dropna().tolist() # DG idx @@ -619,7 +609,7 @@ def send(self, adsys=None, routine=None): # corresponding StaticGen pg in AMS dg_ams = rtn.get(src='pg', attr='v', idx=stg_dg_idx) # --- pref --- - sa.DG.set(value=dg_ams, idx=dg_idx, src='pref0', attr='v') + sa.DG.set(src='pref0', idx=dg_idx, attr='v', value=dg_ams) # --- RenGen: seems unnecessary --- # TODO: which models/params are used to control output and auxillary power? @@ -634,7 +624,7 @@ def send(self, adsys=None, routine=None): # --- other scenarios --- if _dest_check(mname=mname_ads, pname=pname_ads, idx=idx_ads, adsys=sa): - mdl_ads.set(src=pname_ads, attr='v', idx=idx_ads, value=var_ams.v) + mdl_ads.set(src=pname_ads, idx=idx_ads, attr='v', value=var_ams.v) logger.warning(f'Send <{vname_ams}> to {mname_ads}.{pname_ads}') return True @@ -704,10 +694,8 @@ def receive(self, adsys=None, routine=None, no_update=False): idx=link['stg_idx'].values) # NOTE: only update u if changed actually u0_rtn = rtn.get(src=vname_ams, attr='v', idx=link['stg_idx'].values).copy() - rtn.set(src=vname_ams, attr='v', value=u_stg, - idx=link['stg_idx'].values) - rtn.set(src=vname_ams, attr='v', value=u_dyg, - idx=link['stg_idx'].values) + rtn.set(src=vname_ams, idx=link['stg_idx'].values, attr='v', value=u_stg) + rtn.set(src=vname_ams, idx=link['stg_idx'].values, attr='v', value=u_dyg) u_rtn = rtn.get(src=vname_ams, attr='v', idx=link['stg_idx'].values).copy() if not np.array_equal(u0_rtn, u_rtn): pname_to_update.append(vname_ams) @@ -744,10 +732,8 @@ def receive(self, adsys=None, routine=None, no_update=False): # Sync StaticGen.p first, 
then overwrite the ones with dynamic generator p_stg = sa.StaticGen.get(src='p', attr='v', idx=link['stg_idx'].values) - rtn.set(src=vname_ams, attr='v', value=p_stg, - idx=link['stg_idx'].values) - rtn.set(src=vname_ams, attr='v', value=p_dyg, - idx=link['stg_idx'].values) + rtn.set(src=vname_ams, idx=link['stg_idx'].values, attr='v', value=p_stg) + rtn.set(src=vname_ams, idx=link['stg_idx'].values, attr='v', value=p_dyg) pname_to_update.append(vname_ams) @@ -762,7 +748,7 @@ def receive(self, adsys=None, routine=None, no_update=False): # --- other scenarios --- if _dest_check(mname=mname_ads, pname=pname_ads, idx=idx_ads, adsys=sa): v_ads = mdl_ads.get(src=pname_ads, attr='v', idx=idx_ads) - rtn.set(src=vname_ams, attr='v', idx=idx_ads, value=v_ads) + rtn.set(src=vname_ams, idx=idx_ads, attr='v', value=v_ads) pname_to_update.append(vname_ams) logger.warning(f'Receive <{vname_ams}> from {mname_ads}.{pname_ads}') @@ -931,8 +917,11 @@ def make_link_table(adsys): ssa_key0 = pd.merge(left=ssa_key0, how='left', on='stg_idx', right=ssa_rg[['stg_idx', 'rg_idx']]) - ssa_key0 = ssa_key0.fillna(value=False) - dyr = ssa_key0['syg_idx'].astype(bool) + ssa_key0['dg_idx'].astype(bool) + ssa_key0['rg_idx'].astype(bool) + # NOTE: use this instead of fillna to avoid type conversion + idxc = ['stg_idx', 'syg_idx', 'dg_idx', 'rg_idx'] + ssa_key0[idxc] = ssa_key0[idxc].astype('str').replace({'nan': ''}).astype('bool') + + dyr = ssa_key0['syg_idx'] + ssa_key0['dg_idx'] + ssa_key0['rg_idx'] non_dyr = np.logical_not(dyr) ssa_dyr0 = ssa_key0[non_dyr] ssa_dyr0['gammap'] = 1 diff --git a/ams/interop/__init__.py b/ams/interop/__init__.py deleted file mode 100644 index 041597ad..00000000 --- a/ams/interop/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -Interopability package between AMS and other software. - -To install dependencies, do: - -.. code:: bash - - pip install ams[interop] - -To install dependencies for *development*, in the AMS source code folder, do: - -.. 
code:: bash - - pip install -e .[interop] - -""" - -from ams.interop import andes # NOQA diff --git a/ams/io/__init__.py b/ams/io/__init__.py index c24061cb..997de88a 100644 --- a/ams/io/__init__.py +++ b/ams/io/__init__.py @@ -19,7 +19,6 @@ # The first file will be parsed by read() function and the addfile will be parsed by read_add() # Typically, column based formats, such as IEEE CDF and PSS/E RAW, are faster to parse -# TODO: add support for json I/O input_formats = { 'xlsx': ('xlsx',), 'json': ('json',), diff --git a/ams/io/json.py b/ams/io/json.py index 9ea4d9ac..7a8ab107 100644 --- a/ams/io/json.py +++ b/ams/io/json.py @@ -11,7 +11,7 @@ from andes.io.json import (testlines, read) # NOQA from andes.utils.paths import confirm_overwrite -from andes.system import System as andes_system +from ams.shared import empty_adsys, ad_models logger = logging.getLogger(__name__) @@ -62,13 +62,6 @@ def _dump_system(system, skip_empty, orient='records', to_andes=False): Dump parameters of each model into a json string and return them all in an OrderedDict. 
""" - ad_models = [] - if to_andes: - # Instantiate an ANDES system - sa = andes_system(setup=False, default_config=True, - codegen=False, autogen_stale=False, - no_undill=True,) - ad_models = list(sa.models.keys()) out = OrderedDict() for name, instance in system.models.items(): if skip_empty and instance.n == 0: @@ -79,7 +72,7 @@ def _dump_system(system, skip_empty, orient='records', to_andes=False): # NOTE: ommit parameters that are not in ANDES skip_params = [] ams_params = list(instance.params.keys()) - andes_params = list(sa.models[name].params.keys()) + andes_params = list(empty_adsys.models[name].params.keys()) skip_params = list(set(ams_params) - set(andes_params)) df = instance.cache.df_in.drop(skip_params, axis=1, errors='ignore') out[name] = df.to_dict(orient=orient) diff --git a/ams/io/matpower.py b/ams/io/matpower.py index 710fd39e..5e461e84 100644 --- a/ams/io/matpower.py +++ b/ams/io/matpower.py @@ -1,6 +1,5 @@ """ MATPOWER parser. -This module is revised from the existing module ``andes.io.matpower``. """ import logging import numpy as np @@ -23,7 +22,7 @@ def testlines(infile): def read(system, file): """ - Read a MATPOWER data file into mpc, and build andes device elements. + Read a MATPOWER data file into mpc, and build AMS device elements. """ mpc = m2mpc(file) @@ -34,16 +33,14 @@ def mpc2system(mpc: dict, system) -> bool: """ Load an mpc dict into an empty AMS system. - This function is revised from ``andes.io.matpower.mpc2system``. - - Compared to the original one, this function includes the generator cost data. + Revised from ``andes.io.matpower.mpc2system``. Note that `mbase` in mpc is converted to `Sn`, but it is not actually used in MATPOWER nor AMS. Parameters ---------- - system : andes.system.System + system : ams.system.System Empty system to load the data into. 
mpc : dict mpc struct names : numpy arrays @@ -205,11 +202,7 @@ def mpc2system(mpc: dict, system) -> bool: if 'gencost' in mpc: gcost_idx = 0 gen_idx = np.arange(mpc['gen'].shape[0]) + 1 - mpc_cost = np.zeros((mpc['gen'].shape[0], 7)) - if mpc['gencost'].shape[1] < 7: - mpc_cost[:, :mpc['gencost'].shape[1]] = mpc['gencost'] - else: - mpc_cost = mpc['gencost'] + mpc_cost = mpc['gencost'] for data, gen in zip(mpc_cost, gen_idx): # NOTE: only type 2 costs are supported for now # type startup shutdown n c2 c1 c0 @@ -220,9 +213,16 @@ def mpc2system(mpc: dict, system) -> bool: gctype = int(data[0]) startup = data[1] shutdown = data[2] - c2 = data[4] * base_mva ** 2 - c1 = data[5] * base_mva - c0 = data[6] + if data[3] == 3: + c2 = data[4] * base_mva ** 2 + c1 = data[5] * base_mva + c0 = data[6] + elif data[3] == 2: + c2 = 0 + c1 = data[4] * base_mva + c0 = data[5] + else: + raise ValueError('Unrecognized gencost model, please use either quadratic or linear cost model') system.add('GCost', gen=int(gen), u=1, type=gctype, idx=gcost_idx, diff --git a/ams/io/xlsx.py b/ams/io/xlsx.py index bc7826c4..12d40863 100644 --- a/ams/io/xlsx.py +++ b/ams/io/xlsx.py @@ -6,8 +6,8 @@ import logging from andes.io.xlsx import (read, testlines, confirm_overwrite, _add_book) # NOQA -from andes.shared import pd -from andes.system import System as andes_system + +from ams.shared import pd, empty_adsys, ad_models logger = logging.getLogger(__name__) @@ -61,13 +61,6 @@ def _write_system(system, writer, skip_empty, to_andes=False): Rewrite function ``andes.io.xlsx._write_system`` to skip non-andes sheets. 
""" - ad_models = [] - if to_andes: - # Instantiate an ANDES system - sa = andes_system(setup=False, default_config=True, - codegen=False, autogen_stale=False, - no_undill=True,) - ad_models = list(sa.models.keys()) for name, instance in system.models.items(): if skip_empty and instance.n == 0: continue @@ -78,7 +71,7 @@ def _write_system(system, writer, skip_empty, to_andes=False): # NOTE: ommit parameters that are not in ANDES skip_params = [] ams_params = list(instance.params.keys()) - andes_params = list(sa.models[name].params.keys()) + andes_params = list(empty_adsys.models[name].params.keys()) skip_params = list(set(ams_params) - set(andes_params)) df = instance.cache.df_in.drop(skip_params, axis=1, errors='ignore') else: diff --git a/ams/main.py b/ams/main.py index ad4e600f..58fc1f9c 100644 --- a/ams/main.py +++ b/ams/main.py @@ -60,9 +60,16 @@ def config_logger(stream_level=logging.INFO, *, `StreamHandler` verbosity level. file_level : {10, 20, 30, 40, 50}, optional `FileHandler` verbosity level. + Returns ------- None + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ lg = logging.getLogger('ams') lg.setLevel(logging.DEBUG) @@ -105,7 +112,6 @@ def config_logger(stream_level=logging.INFO, *, coloredlogs.install(logger=lg, level=stream_level, fmt=sh_formatter_str) -# TODO: check ``load`` later on to see if some of them can be removed def load(case, setup=True, use_input_path=True, **kwargs): @@ -131,6 +137,12 @@ def load(case, setup=True, If one need to add devices in addition to these from the case file, do ``setup=False`` and call ``System.add()`` to add devices. When done, manually invoke ``setup()`` to set up the system. + + Notes + ----- + Revised from the ANDES project (https://github.com/CURENT/andes). 
+ Original author: Hantao Cui + License: GPL3 """ if use_input_path: input_path = kwargs.get('input_path', '') @@ -180,6 +192,12 @@ def run_case(case, *, routine='pflow', profile=False, add_book : str, optional Name of the device to be added to an excel case as a new sheet. + + Notes + ----- + Revised from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ pr = cProfile.Profile() @@ -249,6 +267,12 @@ def _run_mp_proc(cases, ncpu=NCPUS_PHYSICAL, **kwargs): Run multiprocessing with `Process`. Return values from `run_case` are not preserved. Always return `True` when done. + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ # start processes @@ -283,6 +307,12 @@ def _run_mp_pool(cases, ncpu=NCPUS_PHYSICAL, verbose=logging.INFO, **kwargs): Verbosity level during multiprocessing verbose : 10, 20, 30, 40, 50 Verbosity level outside multiprocessing + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ pool = Pool(ncpu) @@ -337,6 +367,11 @@ def run(filename, input_path='', verbose=20, mp_verbose=30, System or exit_code An instance of system (if `cli == False`) or an exit code otherwise.. + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ if is_interactive() and len(logger.handlers) == 0: @@ -459,6 +494,12 @@ def misc(edit_config='', save_config='', show_license=False, clean=True, recursi overwrite=None, version=False, **kwargs): """ Miscellaneous commands. + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). 
+ Original author: Hantao Cui + License: GPL3 """ if edit_conf(edit_config): @@ -487,6 +528,12 @@ def misc(edit_config='', save_config='', show_license=False, clean=True, recursi def doc(attribute=None, list_supported=False, config=False, **kwargs): """ Quick documentation from command-line. + + Notes + ----- + Revised from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ system = System() if attribute is not None: @@ -512,6 +559,12 @@ def demo(**kwargs): def versioninfo(): """ Print version info for AMS and dependencies. + + Notes + ----- + Revised from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ import numpy as np @@ -574,6 +627,12 @@ def edit_conf(edit_config: Optional[Union[str, bool]] = ''): ------- bool ``True`` is a config file is found and an editor is opened. ``False`` if ``edit_config`` is False. + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ ret = False @@ -627,6 +686,12 @@ def save_conf(config_path=None, overwrite=None, **kwargs): ------- bool ``True`` is the save action is run. ``False`` otherwise. + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ ret = False @@ -644,7 +709,7 @@ def save_conf(config_path=None, overwrite=None, **kwargs): return ret -# TODO: list AMS output files here +# TODO: change to AMS output types def remove_output(recursive=False): """ Remove the outputs generated by AMS, including power flow reports @@ -661,6 +726,12 @@ def remove_output(recursive=False): bool ``True`` is the function body executes with success. ``False`` otherwise. + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). 
+ Original author: Hantao Cui + License: GPL3 """ found = False cwd = os.getcwd() @@ -690,6 +761,12 @@ def remove_output(recursive=False): def selftest(quick=False, extra=False, **kwargs): """ Run unit tests. + + Notes + ----- + Copied from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ # map verbosity level from logging to unittest diff --git a/ams/models/__init__.py b/ams/models/__init__.py index 4d938067..354bb2da 100644 --- a/ams/models/__init__.py +++ b/ams/models/__init__.py @@ -10,7 +10,7 @@ ('bus', ['Bus']), ('static', ['PQ', 'Slack', 'PV']), ('shunt', ['Shunt']), - ('line', ['Line']), + ('line', ['Line', 'Jumper']), ('distributed', ['PVD1', 'ESD1', 'EV1', 'EV2']), ('renewable', ['REGCA1', 'REGCV1', 'REGCV2']), ('area', ['Area']), diff --git a/ams/models/group.py b/ams/models/group.py index 15fb217e..e762cf3a 100644 --- a/ams/models/group.py +++ b/ams/models/group.py @@ -199,8 +199,9 @@ class StaticGen(GroupBase): def __init__(self): super().__init__() - self.common_params.extend(('Sn', 'Vn', 'p0', 'q0', 'ra', 'xs', 'subidx', - 'bus', 'pmax', 'pmin', 'pg0', 'ctrl', 'R10')) + self.common_params.extend(('bus', 'Sn', 'Vn', 'p0', 'q0', 'ra', 'xs', 'subidx', + 'pmax', 'pmin', 'pg0', 'ctrl', 'R10', 'td1', 'td2', + 'zone')) self.common_vars.extend(('p', 'q')) @@ -210,6 +211,12 @@ def __init__(self): self.common_params.extend(('bus1', 'bus2', 'r', 'x')) +class ACShort(GroupBase): + def __init__(self): + super(ACShort, self).__init__() + self.common_params.extend(('bus1', 'bus2')) + + class StaticLoad(GroupBase): """ Static load group. 
@@ -217,7 +224,7 @@ class StaticLoad(GroupBase): def __init__(self): super().__init__() - self.common_params.extend(('p0', 'q0', 'zone')) + self.common_params.extend(('bus', 'p0', 'q0', 'ctrl', 'zone')) class StaticShunt(GroupBase): diff --git a/ams/models/line.py b/ams/models/line.py index 9d364df4..c137e035 100644 --- a/ams/models/line.py +++ b/ams/models/line.py @@ -1,4 +1,5 @@ from andes.models.line.line import LineData +from andes.models.line.jumper import JumperData from andes.core.param import NumParam from andes.shared import deg2rad, np, spmatrix @@ -208,3 +209,30 @@ def build_Bdc(self): Bdc[item, item] = 1e-6 return Bdc + + +class Jumper(JumperData, Model): + """ + Jumper is a device to short two buses (merging two buses into one). + + Jumper can connect two buses satisfying one of the following conditions: + + - neither bus is voltage-controlled + - either bus is voltage-controlled + - both buses are voltage-controlled, and the voltages are the same. + + If the buses are controlled in different voltages, power flow will + not solve (as the power flow through the jumper will be infinite). + + In the solutions, the ``p`` and ``q`` are flowing out of bus1 + and flowing into bus2. + + Setting a Jumper's connectivity status ``u`` to zero will disconnect the two + buses. In the case of a system split, one will need to call + ``System.connectivity()`` immediately following the split to detect islands. 
+ """ + + def __init__(self, system=None, config=None) -> None: + JumperData.__init__(self) + Model.__init__(self, system, config) + self.group = 'ACShort' diff --git a/ams/models/reserve.py b/ams/models/reserve.py index 59aee826..04022daa 100644 --- a/ams/models/reserve.py +++ b/ams/models/reserve.py @@ -29,10 +29,10 @@ def __init__(self, system, config): self.group = 'Reserve' self.du = NumParam(default=0, info='Zonal RegUp reserve demand', - tex_name=r'd_{u}', unit='%',) + tex_name=r'd_{u}',) self.dd = NumParam(default=0, info='Zonal RegDown reserve demand', - tex_name=r'd_{d}', unit='%',) + tex_name=r'd_{d}',) class SR(ReserveData, Model): @@ -49,7 +49,7 @@ def __init__(self, system, config): ReserveData.__init__(self) Model.__init__(self, system, config) self.group = 'Reserve' - self.demand = NumParam(default=0.1, unit='%', + self.demand = NumParam(default=0.1, info='Zonal spinning reserve demand', tex_name=r'd_{SR}') @@ -68,7 +68,7 @@ def __init__(self, system, config): ReserveData.__init__(self) Model.__init__(self, system, config) self.group = 'Reserve' - self.demand = NumParam(default=0.1, unit='%', + self.demand = NumParam(default=0.1, info='Zonal non-spinning reserve demand', tex_name=r'd_{NSR}') diff --git a/ams/models/static/gen.py b/ams/models/static/gen.py index dabe42fc..fe6ff619 100644 --- a/ams/models/static/gen.py +++ b/ams/models/static/gen.py @@ -16,7 +16,13 @@ class GenParam: def __init__(self) -> None: self.ctrl = NumParam(default=1, info="generator controllability", - tex_name=r'c_{trl}',) + tex_name=r'c_{trl}', + unit='bool',) + self.uf = NumParam(default=0, + info="enforced on/off status; 0 unenforced, 1 enforced on, -1 enforced off", + tex_name=r'u_{f}', + unit='int', + vrange=(-1, 1)) self.Pc1 = NumParam(default=0.0, info="lower real power output of PQ capability curve", tex_name=r'P_{c1}', @@ -65,12 +71,12 @@ def __init__(self) -> None: tex_name=r'p_{g0}', unit='p.u.', ) - self.td1 = NumParam(default=0, + self.td1 = NumParam(default=1, 
info='minimum ON duration', tex_name=r't_{d1}', unit='h', ) - self.td2 = NumParam(default=0, + self.td2 = NumParam(default=0.5, info='minimum OFF duration', tex_name=r't_{d2}', unit='h', diff --git a/ams/opt/omodel.py b/ams/opt/omodel.py index a5e62131..d5ea65f4 100644 --- a/ams/opt/omodel.py +++ b/ams/opt/omodel.py @@ -8,17 +8,78 @@ import re import numpy as np -import scipy.sparse as spr from andes.core.common import Config from andes.utils.misc import elapsed import cvxpy as cp -from ams.shared import sps # NOQA +from ams.utils import pretty_long_message +from ams.shared import sps logger = logging.getLogger(__name__) +_prefix = r" - --------------> | " +_max_length = 80 + + +def ensure_symbols(func): + """ + Decorator to ensure that symbols are generated before parsing. + If not, it runs self.rtn.syms.generate_symbols(). + + Designed to be used on the `parse` method of the optimization elements (`OptzBase`) + and optimization model (`OModel`), i.e., `Var`, `Param`, `Constraint`, `Objective`, + and `ExpressionCalc`. + + Note: + ----- + Parsing before symbol generation can give wrong results. Ensure that symbols + are generated before calling the `parse` method. + """ + + def wrapper(self, *args, **kwargs): + if not self.rtn._syms: + logger.debug(f"<{self.rtn.class_name}> symbols are not generated yet. Generating now...") + self.rtn.syms.generate_symbols() + return func(self, *args, **kwargs) + return wrapper + + +def ensure_mats_and_parsed(func): + """ + Decorator to ensure that system matrices are built and OModel is parsed + before evaluation. If not, it runs the necessary methods to initialize them. + + Designed to be used on the `evaluate` method of the optimization elements (`OptzBase`) + and optimization model (`OModel`), i.e., `Var`, `Param`, `Constraint`, `Objective`, + and `ExpressionCalc`. + + Note: + ----- + Evaluation before matrices building and parsing can run into errors. 
Ensure that + system matrices are built and OModel is parsed before calling the `evaluate` method. + """ + + def wrapper(self, *args, **kwargs): + try: + if not self.rtn.system.mats.initialized: + logger.debug("System matrices are not built yet. Building now...") + self.rtn.system.mats.build() + if isinstance(self, (OptzBase, Var, Param, Constraint, Objective)): + if not self.om.parsed: + logger.debug("OModel is not parsed yet. Parsing now...") + self.om.parse() + elif isinstance(self, OModel): + if not self.parsed: + logger.debug("OModel is not parsed yet. Parsing now...") + self.parse() + except Exception as e: + logger.error(f"Error during initialization or parsing: {e}") + raise + return func(self, *args, **kwargs) + return wrapper + class OptzBase: """ @@ -35,6 +96,11 @@ class OptzBase: ---------- rtn : ams.routines.Routine The owner routine instance. + + Note: + ----- + Ensure that symbols are generated before calling the `parse` method. Parsing + before symbol generation can give wrong results. """ def __init__(self, @@ -49,13 +115,22 @@ def __init__(self, self.is_disabled = False self.rtn = None self.optz = None # corresponding optimization element + self.code = None + @ensure_symbols def parse(self): """ Parse the object. """ raise NotImplementedError + @ensure_mats_and_parsed + def evaluate(self): + """ + Evaluate the object. 
+ """ + raise NotImplementedError + @property def class_name(self): """ @@ -81,7 +156,7 @@ def shape(self): try: return self.om.__dict__[self.name].shape except KeyError: - logger.warning('Shape info is not ready before initialziation.') + logger.warning('Shape info is not ready before initialization.') return None @property @@ -92,7 +167,7 @@ def size(self): if self.rtn.initialized: return self.om.__dict__[self.name].size else: - logger.warning(f'<{self.rtn.class_name}> is not initialized yet.') + logger.warning(f'Routine <{self.rtn.class_name}> is not initialized yet.') return None @@ -114,14 +189,10 @@ def __init__(self, self.e_str = e_str self.code = None - def parse(self, no_code=True): + @ensure_symbols + def parse(self): """ Parse the Expression. - - Parameters - ---------- - no_code : bool, optional - Flag indicating if the code should be shown, True by default. """ # parse the expression str sub_map = self.om.rtn.syms.sub_map @@ -129,20 +200,44 @@ def parse(self, no_code=True): for pattern, replacement in sub_map.items(): try: code_expr = re.sub(pattern, replacement, code_expr) - except TypeError as e: - logger.error(f"Error in parsing expr <{self.name}>.") - raise e + except Exception as e: + raise Exception(f"Error in parsing expr <{self.name}>.\n{e}") # store the parsed expression str code self.code = code_expr - code_expr = "self.optz = " + code_expr - msg = f"- Parse ExpressionCalc <{self.name}>: {self.e_str} " - logger.debug(msg) - if not no_code: - logger.info(f"<{self.name}> code: {code_expr}") - # execute the expression - exec(code_expr, globals(), locals()) + msg = f" - ExpressionCalc <{self.name}>: {self.e_str}" + logger.debug(pretty_long_message(msg, _prefix, max_length=_max_length)) + return True + + @ensure_mats_and_parsed + def evaluate(self): + """ + Evaluate the expression. 
+ """ + msg = f" - Expression <{self.name}>: {self.code}" + logger.debug(pretty_long_message(msg, _prefix, max_length=_max_length)) + try: + local_vars = {'self': self, 'np': np, 'cp': cp} + self.optz = self._evaluate_expression(self.code, local_vars=local_vars) + except Exception as e: + raise Exception(f"Error in evaluating expr <{self.name}>.\n{e}") return True + def _evaluate_expression(self, code, local_vars=None): + """ + Helper method to evaluate the expression code. + + Parameters + ---------- + code : str + The code string representing the expression. + + Returns + ------- + cp.Expression + The evaluated cvxpy expression. + """ + return eval(code, {}, local_vars) + @property def v(self): """ @@ -207,7 +302,7 @@ def __init__(self, integer: Optional[bool] = False, pos: Optional[bool] = False, neg: Optional[bool] = False, - sparse: Optional[list] = False, + sparse: Optional[bool] = False, ): OptzBase.__init__(self, name=name, info=info, unit=unit) self.no_parse = no_parse # True to skip parsing the parameter @@ -228,32 +323,52 @@ def __init__(self, ('neg', neg), ))) + @ensure_symbols def parse(self): """ Parse the parameter. + + Returns + ------- + bool + Returns True if the parsing is successful, False otherwise. """ - config = self.config.as_dict() # NOQA sub_map = self.om.rtn.syms.sub_map - shape = np.shape(self.v) - # NOTE: it seems that there is no need to use re.sub here - code_param = f"self.optz=param(shape={shape}, **config)" + code_param = "param(**config)" for pattern, replacement, in sub_map.items(): - code_param = re.sub(pattern, replacement, code_param) - exec(code_param, globals(), locals()) + try: + code_param = re.sub(pattern, replacement, code_param) + except Exception as e: + raise Exception(f"Error in parsing param <{self.name}>.\n{e}") + self.code = code_param + return True + + @ensure_mats_and_parsed + def evaluate(self): + """ + Evaluate the parameter. 
+ """ + if self.no_parse: + return True + + config = self.config.as_dict() try: msg = f"Parameter <{self.name}> is set as sparse, " msg += "but the value is not sparse." - val = "self.v" if self.sparse: - if not spr.issparse(self.v): - val = "sps.csr_matrix(self.v)" - exec(f"self.optz.value = {val}", globals(), locals()) + self.v = sps.csr_matrix(self.v) + + # Create the cvxpy.Parameter object + self.optz = cp.Parameter(shape=self.v.shape, **config) + self.optz.value = self.v except ValueError: msg = f"Parameter <{self.name}> has non-numeric value, " - msg += "no_parse=True is applied." + msg += "set `no_parse=True`." logger.warning(msg) self.no_parse = True - return False + return True + except Exception as e: + raise Exception(f"Error in evaluating param <{self.name}>.\n{e}") return True def update(self): @@ -426,23 +541,12 @@ def get_idx(self): else: return self.owner.idx.v + @ensure_symbols def parse(self): """ Parse the variable. """ sub_map = self.om.rtn.syms.sub_map - # only used for CVXPY - # NOTE: Config only allow lower case letters, do a conversion here - config = {} - for k, v in self.config.as_dict().items(): - if k == 'psd': - config['PSD'] = v - elif k == 'nsd': - config['NSD'] = v - elif k == 'bool': - config['boolean'] = v - else: - config[k] = v # NOTE: number of rows is the size of the source variable if self.owner is not None: nr = self.owner.n @@ -463,11 +567,44 @@ def parse(self): nc = shape[1] if len(shape) > 1 else 0 else: raise ValueError(f"Invalid shape {self._shape}.") - code_var = f"self.optz=var({shape}, **config)" + code_var = f"var({shape}, **config)" + logger.debug(f" - Var <{self.name}>: {self.code}") for pattern, replacement, in sub_map.items(): - code_var = re.sub(pattern, replacement, code_var) - # build the Var object - exec(code_var, globals(), locals()) + try: + code_var = re.sub(pattern, replacement, code_var) + except Exception as e: + raise Exception(f"Error in parsing var <{self.name}>.\n{e}") + self.code = code_var + 
return True + + @ensure_mats_and_parsed + def evaluate(self): + """ + Evaluate the variable. + + Returns + ------- + bool + Returns True if the evaluation is successful, False otherwise. + """ + # NOTE: in CVXPY, Config only allow lower case letters + config = {} # used in `self.code` + for k, v in self.config.as_dict().items(): + if k == 'psd': + config['PSD'] = v + elif k == 'nsd': + config['NSD'] = v + elif k == 'bool': + config['boolean'] = v + else: + config[k] = v + msg = f" - Var <{self.name}>: {self.code}" + logger.debug(pretty_long_message(msg, _prefix, max_length=_max_length)) + try: + local_vars = {'self': self, 'config': config, 'cp': cp} + self.optz = eval(self.code, {}, local_vars) + except Exception as e: + raise Exception(f"Error in evaluating var <{self.name}>.\n{e}") return True def __repr__(self): @@ -499,13 +636,19 @@ class Constraint(OptzBase): Flag indicating if the constraint is disabled, False by default. rtn : ams.routines.Routine The owner routine instance. + is_disabled : bool, optional + Flag indicating if the constraint is disabled, False by default. + dual : float, optional + The dual value of the constraint. + code : str, optional + The code string for the constraint """ def __init__(self, name: Optional[str] = None, e_str: Optional[str] = None, info: Optional[str] = None, - is_eq: Optional[str] = False, + is_eq: Optional[bool] = False, ): OptzBase.__init__(self, name=name, info=info) self.e_str = e_str @@ -513,16 +656,11 @@ def __init__(self, self.is_disabled = False self.dual = None self.code = None - # TODO: add constraint info from solver, maybe dual? - def parse(self, no_code=True): + @ensure_symbols + def parse(self): """ Parse the constraint. - - Parameters - ---------- - no_code : bool, optional - Flag indicating if the code should be shown, True by default. 
""" # parse the expression str sub_map = self.om.rtn.syms.sub_map @@ -531,34 +669,59 @@ def parse(self, no_code=True): try: code_constr = re.sub(pattern, replacement, code_constr) except TypeError as e: - logger.error(f"Error in parsing constr <{self.name}>.") - raise e - # store the parsed expression str code - self.code = code_constr + raise TypeError(f"Error in parsing constr <{self.name}>.\n{e}") # parse the constraint type - code_constr = "self.optz=" + code_constr code_constr += " == 0" if self.is_eq else " <= 0" - msg = f"- Parse Constr <{self.name}>: {self.e_str} " - msg += "== 0" if self.is_eq else "<= 0" - logger.debug(msg) - if not no_code: - logger.info(f"<{self.name}> code: {code_constr}") - # set the parsed constraint - exec(code_constr, globals(), locals()) + # store the parsed expression str code + self.code = code_constr + msg = f" - Constr <{self.name}>: {self.e_str}" + logger.debug(pretty_long_message(msg, _prefix, max_length=_max_length)) return True + def _evaluate_expression(self, code, local_vars=None): + """ + Helper method to evaluate the expression code. + + Parameters + ---------- + code : str + The code string representing the expression. + + Returns + ------- + cp.Expression + The evaluated cvxpy expression. + """ + return eval(code, {}, local_vars) + + @ensure_mats_and_parsed + def evaluate(self): + """ + Evaluate the constraint. + """ + msg = f" - Constr <{self.name}>: {self.code}" + logger.debug(pretty_long_message(msg, _prefix, max_length=_max_length)) + try: + local_vars = {'self': self, 'cp': cp, 'sub_map': self.om.rtn.syms.val_map} + self.optz = self._evaluate_expression(self.code, local_vars=local_vars) + except Exception as e: + raise Exception(f"Error in evaluating constr <{self.name}>.\n{e}") + def __repr__(self): enabled = 'OFF' if self.is_disabled else 'ON' out = f"{self.class_name}: {self.name} [{enabled}]" return out @property - def v2(self): + def e(self): """ Return the calculated constraint LHS value. 
- Note that ``v`` should be used primarily as it is obtained + Note that `v` should be used primarily as it is obtained from the solver directly. - ``v2`` is for debugging purpose, and should be consistent with ``v``. + + `e` is for debugging purpose. For a successfully solved problem, + `e` should equal to `v`. However, when a problem is infeasible + or unbounded, `e` can be used to check the constraint LHS value. """ if self.code is None: logger.info(f"Constraint <{self.name}> is not parsed yet.") @@ -570,15 +733,15 @@ def v2(self): try: code = re.sub(pattern, replacement, code) except TypeError as e: - logger.error(f"Error in parsing value for constr <{self.name}>.") - raise e + raise TypeError(e) try: - logger.debug(f"Value code: {code}") - return eval(code) + logger.debug(pretty_long_message(f"Value code: {code}", + _prefix, max_length=_max_length)) + local_vars = {'self': self, 'np': np, 'cp': cp, 'val_map': val_map} + return self._evaluate_expression(code, local_vars) except Exception as e: - logger.error(f"Error in calculating constr <{self.name}>.") - logger.error(f"Original error: {e}") + logger.error(f"Error in calculating constr <{self.name}>.\n{e}") return None @property @@ -629,6 +792,8 @@ class Objective(OptzBase): computation. rtn : ams.routines.Routine The owner routine instance. + code : str + The code string for the objective function. """ def __init__(self, @@ -643,12 +808,16 @@ def __init__(self, self.code = None @property - def v2(self): + def e(self): """ Return the calculated objective value. - Note that ``v`` should be used primarily as it is obtained + + Note that `v` should be used primarily as it is obtained from the solver directly. - ``v2`` is for debugging purpose, and should be consistent with ``v``. + + `e` is for debugging purpose. For a successfully solved problem, + `e` should equal to `v`. However, when a problem is infeasible + or unbounded, `e` can be used to check the objective value. 
""" if self.code is None: logger.info(f"Objective <{self.name}> is not parsed yet.") @@ -664,11 +833,12 @@ def v2(self): raise e try: - logger.debug(f"Value code: {code}") - return eval(code) + logger.debug(pretty_long_message(f"Value code: {code}", + _prefix, max_length=_max_length)) + local_vars = {'self': self, 'np': np, 'cp': cp, 'val_map': val_map} + return self._evaluate_expression(code, local_vars) except Exception as e: - logger.error(f"Error in calculating obj <{self.name}>.") - logger.error(f"Original error: {e}") + logger.error(f"Error in calculating obj <{self.name}>.\n{e}") return None @property @@ -685,34 +855,68 @@ def v(self): def v(self, value): raise AttributeError("Cannot set the value of the objective function.") - def parse(self, no_code=True): + @ensure_symbols + def parse(self): """ Parse the objective function. - Parameters - ---------- - no_code : bool, optional - Flag indicating if the code should be shown, True by default. + Returns + ------- + bool + Returns True if the parsing is successful, False otherwise. """ # parse the expression str sub_map = self.om.rtn.syms.sub_map code_obj = self.e_str for pattern, replacement, in sub_map.items(): - code_obj = re.sub(pattern, replacement, code_obj) + try: + code_obj = re.sub(pattern, replacement, code_obj) + except Exception as e: + raise Exception(f"Error in parsing obj <{self.name}>.\n{e}") # store the parsed expression str code self.code = code_obj if self.sense not in ['min', 'max']: raise ValueError(f'Objective sense {self.sense} is not supported.') sense = 'cp.Minimize' if self.sense == 'min' else 'cp.Maximize' - code_obj = f"self.optz={sense}({code_obj})" - logger.debug(f"Parse Objective <{self.name}>: {self.sense.upper()}. 
{self.e_str}") - if not no_code: - logger.info(f"Code: {code_obj}") - # set the parsed objective function - exec(code_obj, globals(), locals()) - exec("self.om.obj = self.optz", globals(), locals()) + self.code = f"{sense}({code_obj})" + msg = f" - Objective <{self.name}>: {self.code}" + logger.debug(pretty_long_message(msg, _prefix, max_length=_max_length)) + return True + + @ensure_mats_and_parsed + def evaluate(self): + """ + Evaluate the objective function. + + Returns + ------- + bool + Returns True if the evaluation is successful, False otherwise. + """ + logger.debug(f" - Objective <{self.name}>: {self.e_str}") + try: + local_vars = {'self': self, 'cp': cp} + self.optz = self._evaluate_expression(self.code, local_vars=local_vars) + except Exception as e: + raise Exception(f"Error in evaluating obj <{self.name}>.\n{e}") return True + def _evaluate_expression(self, code, local_vars=None): + """ + Helper method to evaluate the expression code. + + Parameters + ---------- + code : str + The code string representing the expression. + + Returns + ------- + cp.Expression + The evaluated cvxpy expression. + """ + return eval(code, {}, local_vars) + def __repr__(self): return f"{self.class_name}: {self.name} [{self.sense.upper()}]" @@ -738,6 +942,12 @@ class OModel: Constraints. obj: Objective Objective function. + initialized: bool + Flag indicating if the model is initialized. + parsed: bool + Flag indicating if the model is parsed. + evaluated: bool + Flag indicating if the model is evaluated. """ def __init__(self, routine): @@ -749,90 +959,194 @@ def __init__(self, routine): self.obj = None self.initialized = False self.parsed = False + self.evaluated = False + self.finalized = False - def parse(self, no_code=True): + @ensure_symbols + def parse(self, force=False): """ Parse the optimization model from the symbolic description. + This method should be called after the routine symbols are generated + `self.rtn.syms.generate_symbols()`. 
It parses the following components + of the optimization model: parameters, decision variables, constraints, + objective function, and expressions. + Parameters ---------- - no_code : bool, optional - Flag indicating if the parsing code should be displayed, - True by default. - """ - rtn = self.rtn - rtn.syms.generate_symbols(force_generate=False) + force : bool, optional + Flag indicating if to force the parsing. + Returns + ------- + bool + Returns True if the parsing is successful, False otherwise. + """ + if self.parsed and not force: + logger.debug("Model is already parsed.") + return self.parsed + t, _ = elapsed() # --- add RParams and Services as parameters --- - t0, _ = elapsed() - logger.debug(f'Parsing OModel for {rtn.class_name}') - for key, val in rtn.params.items(): + logger.warning(f'Parsing OModel for <{self.rtn.class_name}>') + for key, val in self.rtn.params.items(): if not val.no_parse: - try: - val.parse() - except Exception as e: - msg = f"Failed to parse Param <{key}>. " - msg += f"Original error: {e}" - raise Exception(msg) - setattr(self, key, val.optz) - _, s = elapsed(t0) - logger.debug(f"Parse Params in {s}") + val.parse() # --- add decision variables --- - t0, _ = elapsed() - for key, val in rtn.vars.items(): - try: - val.parse() - except Exception as e: - msg = f"Failed to parse Var <{key}>. " - msg += f"Original error: {e}" - raise Exception(msg) - setattr(self, key, val.optz) - _, s = elapsed(t0) - logger.debug(f"Parse Vars in {s}") + for key, val in self.rtn.vars.items(): + val.parse() # --- add constraints --- - t0, _ = elapsed() - for key, val in rtn.constrs.items(): - try: - val.parse(no_code=no_code) - except Exception as e: - msg = f"Failed to parse Constr <{key}>. 
" - msg += f"Original error: {e}" - raise Exception(msg) - setattr(self, key, val.optz) - _, s = elapsed(t0) - logger.debug(f"Parse Constrs in {s}") + for key, val in self.rtn.constrs.items(): + val.parse() # --- parse objective functions --- - t0, _ = elapsed() - if rtn.type != 'PF': - if rtn.obj is not None: + if self.rtn.type != 'PF': + if self.rtn.obj is not None: try: - rtn.obj.parse(no_code=no_code) + self.rtn.obj.parse() except Exception as e: - msg = f"Failed to parse Objective <{rtn.obj.name}>. " - msg += f"Original error: {e}" - raise Exception(msg) + raise Exception(f"Failed to parse Objective <{self.rtn.obj.name}>.\n{e}") else: - logger.warning(f"{rtn.class_name} has no objective function!") - _, s = elapsed(t0) + logger.warning(f"{self.rtn.class_name} has no objective function!") self.parsed = False return self.parsed - _, s = elapsed(t0) - logger.debug(f"Parse Objective in {s}") # --- parse expressions --- - t0, _ = elapsed() for key, val in self.rtn.exprs.items(): - val.parse(no_code=no_code) - _, s = elapsed(t0) - logger.debug(f"Parse Expressions in {s}") + try: + val.parse() + except Exception as e: + raise Exception(f"Failed to parse ExpressionCalc <{key}>.\n{e}") + _, s = elapsed(t) + logger.debug(f" -> Parsed in {s}") self.parsed = True return self.parsed - def init(self, no_code=True): + def _evaluate_params(self): + """ + Evaluate the parameters. + """ + for key, val in self.rtn.params.items(): + try: + val.evaluate() + setattr(self, key, val.optz) + except Exception as e: + raise Exception(f"Failed to evaluate Param <{key}>.\n{e}") + + def _evaluate_vars(self): + """ + Evaluate the decision variables. + """ + for key, val in self.rtn.vars.items(): + try: + val.evaluate() + setattr(self, key, val.optz) + except Exception as e: + raise Exception(f"Failed to evaluate Var <{key}>.\n{e}") + + def _evaluate_constrs(self): + """ + Evaluate the constraints. 
+ """ + for key, val in self.rtn.constrs.items(): + try: + val.evaluate() + setattr(self, key, val.optz) + except Exception as e: + raise Exception(f"Failed to evaluate Constr <{key}>.\n{e}") + + def _evaluate_obj(self): + """ + Evaluate the objective function. + """ + # NOTE: since we already have the attribute `obj`, + # we can update it rather than setting it + if self.rtn.type != 'PF': + self.rtn.obj.evaluate() + self.obj = self.rtn.obj.optz + + def _evaluate_exprs(self): + """ + Evaluate the expressions. + """ + for key, val in self.rtn.exprs.items(): + try: + val.evaluate() + except Exception as e: + raise Exception(f"Failed to evaluate ExpressionCalc <{key}>.\n{e}") + + @ensure_mats_and_parsed + def evaluate(self, force=False): + """ + Evaluate the optimization model. + + This method should be called after `self.parse()`. It evaluates the following + components of the optimization model: parameters, decision variables, constraints, + objective function, and expressions. + + Parameters + ---------- + force : bool, optional + Flag indicating if to force the evaluation. + + Returns + ------- + bool + Returns True if the evaluation is successful, False otherwise. + """ + if self.evaluated and not force: + logger.debug("Model is already evaluated.") + return self.evaluated + logger.warning(f"Evaluating OModel for <{self.rtn.class_name}>") + t, _ = elapsed() + + self._evaluate_params() + self._evaluate_vars() + self._evaluate_constrs() + self._evaluate_obj() + self._evaluate_exprs() + + self.evaluated = True + _, s = elapsed(t) + logger.debug(f" -> Evaluated in {s}") + return self.evaluated + + def finalize(self, force=False): + """ + Finalize the optimization model. + + This method should be called after `self.evaluate()`. It assembles the optimization + problem from the evaluated components. + + Returns + ------- + bool + Returns True if the finalization is successful, False otherwise. 
+ """ + # NOTE: for power flow type, we skip the finalization + if self.rtn.type == 'PF': + self.finalized = True + return self.finalized + if self.finalized and not force: + logger.debug("Model is already finalized.") + return self.finalized + logger.warning(f"Finalizing OModel for <{self.rtn.class_name}>") + t, _ = elapsed() + + # Collect constraints that are not disabled + constrs_add = [val.optz for key, val in self.rtn.constrs.items( + ) if not val.is_disabled and val is not None] + # Construct the problem using cvxpy.Problem + self.prob = cp.Problem(self.obj, constrs_add) + + _, s = elapsed(t) + logger.debug(f" -> Finalized in {s}") + self.finalized = True + return self.finalized + + def init(self, force=False): """ Set up the optimization model from the symbolic description. @@ -841,47 +1155,29 @@ def init(self, no_code=True): Parameters ---------- - no_code : bool, optional - Flag indicating if the parsing code should be displayed, - True by default. + force : bool, optional + Flag indicating if to force the OModel initialization. + If True, following methods will be called by force: `self.parse()`, + `self.evaluate()`, `self.finalize()` Returns ------- bool Returns True if the setup is successful, False otherwise. 
""" - t_setup, _ = elapsed() - - if not self.parsed: - self.parse(no_code=no_code) - - if self.rtn.type == 'PF': - _, s_setup = elapsed(t_setup) - self.initialized = True - logger.debug(f"OModel for <{self.rtn.class_name}> initialized in {s_setup}.") + if self.initialized and not force: + logger.debug("OModel is already initialized.") return self.initialized - # --- finalize the optimziation formulation --- - code_prob = "self.prob = problem(self.obj, " - constrs_skip = [] - constrs_add = [] - for key, val in self.rtn.constrs.items(): - if (val.is_disabled) or (val is None): - constrs_skip.append(f'<{key}>') - else: - constrs_add.append(val.optz) - code_prob += "[constr for constr in constrs_add])" - for pattern, replacement in self.rtn.syms.sub_map.items(): - code_prob = re.sub(pattern, replacement, code_prob) + t, _ = elapsed() - t_final, _ = elapsed() - exec(code_prob, globals(), locals()) - _, s_final = elapsed(t_final) - logger.debug(f"Finalize in {s_final}") + self.parse(force=force) + self.evaluate(force=force) + self.finalize(force=force) - _, s_setup = elapsed(t_setup) + _, s = elapsed(t) self.initialized = True - logger.debug(f"OModel for <{self.rtn.class_name}> initialized in {s_setup}.") + logger.debug(f"OModel for <{self.rtn.class_name}> initialized in {s}") return self.initialized @@ -906,9 +1202,9 @@ def _register_attribute(self, key, value): elif isinstance(value, cp.Parameter): self.params[key] = value - def __setattr__(self, __name: str, __value: Any): - self._register_attribute(__name, __value) - super().__setattr__(__name, __value) + def __setattr__(self, name: str, value: Any): + super().__setattr__(name, value) + self._register_attribute(name, value) def update(self, params): """ diff --git a/ams/report.py b/ams/report.py index 0442cd6e..ea3e62f6 100644 --- a/ams/report.py +++ b/ams/report.py @@ -28,6 +28,12 @@ def report_info(system) -> list: class Report: """ Report class to store routine analysis reports. 
+ + Notes + ----- + Revised from the ANDES project (https://github.com/CURENT/andes). + Original author: Hantao Cui + License: GPL3 """ def __init__(self, system): diff --git a/ams/routines/__init__.py b/ams/routines/__init__.py index c3dd4b06..15f15565 100644 --- a/ams/routines/__init__.py +++ b/ams/routines/__init__.py @@ -4,7 +4,6 @@ from collections import OrderedDict from andes.utils.func import list_flatten -from ams.routines.routine import RoutineBase # NOQA all_routines = OrderedDict([ ('dcpf', ['DCPF']), diff --git a/ams/routines/acopf.py b/ams/routines/acopf.py index 3f46f6d7..3a746ffb 100644 --- a/ams/routines/acopf.py +++ b/ams/routines/acopf.py @@ -93,10 +93,10 @@ def solve(self, method=None, **kwargs): res, sstats = runopf(casedata=ppc, ppopt=ppopt, **kwargs) return res, sstats - def run(self, force_init=False, no_code=True, - method=None, **kwargs): + def run(self, **kwargs): """ Run ACOPF using PYPOWER with PIPS. + *args and **kwargs go to `self.solve()`, which are not used yet. Examples -------- @@ -117,6 +117,4 @@ def run(self, force_init=False, no_code=True, exit_code : int Exit code of the routine. """ - super().run(force_init=force_init, - no_code=no_code, method=method, - **kwargs, ) + super().run(**kwargs) diff --git a/ams/routines/dcopf.py b/ams/routines/dcopf.py index 844e423e..f2220cdf 100644 --- a/ams/routines/dcopf.py +++ b/ams/routines/dcopf.py @@ -20,6 +20,12 @@ class DCOPF(RoutineBase): DC optimal power flow (DCOPF). The nodal price is calculated as ``pi`` in ``pic``. + + References + ---------- + 1. R. D. Zimmerman, C. E. Murillo-Sanchez, and R. J. Thomas, “MATPOWER: Steady-State Operations, Planning, and + Analysis Tools for Power Systems Research and Education,” IEEE Trans. Power Syst., vol. 26, no. 1, pp. 12–19, + Feb. 
2011 """ def __init__(self, system, config): @@ -94,11 +100,19 @@ def __init__(self, system, config): model='Slack', src='bus', no_parse=True,) # --- load --- + self.upq = RParam(info='Load connection status', + name='upq', tex_name=r'u_{PQ}', + model='StaticLoad', src='u', + no_parse=True,) self.pd = RParam(info='active demand', name='pd', tex_name=r'p_{d}', model='StaticLoad', src='p0', unit='p.u.',) # --- line --- + self.ul = RParam(info='Line connection status', + name='ul', tex_name=r'u_{l}', + model='Line', src='u', + no_parse=True,) self.rate_a = RParam(info='long-term flow limit', name='rate_a', tex_name=r'R_{ATEA}', unit='p.u.', model='Line',) @@ -181,7 +195,7 @@ def __init__(self, system, config): unit='$/p.u.', model='Bus',) # --- power balance --- - pb = 'Bbus@aBus + Pbusinj + Cl@pd + Csh@gsh - Cg@pg' + pb = 'Bbus@aBus + Pbusinj + Cl@(mul(upq, pd)) + Csh@gsh - Cg@pg' self.pb = Constraint(name='pb', info='power balance', e_str=pb, is_eq=True,) # --- line flow --- @@ -191,10 +205,10 @@ def __init__(self, system, config): model='Line',) self.plflb = Constraint(info='line flow lower bound', name='plflb', is_eq=False, - e_str='-Bf@aBus - Pfinj - rate_a',) + e_str='-Bf@aBus - Pfinj - mul(ul, rate_a)',) self.plfub = Constraint(info='line flow upper bound', name='plfub', is_eq=False, - e_str='Bf@aBus + Pfinj - rate_a',) + e_str='Bf@aBus + Pfinj - mul(ul, rate_a)',) self.alflb = Constraint(info='line angle difference lower bound', name='alflb', is_eq=False, e_str='-CftT@aBus + amin',) @@ -220,20 +234,29 @@ def __init__(self, system, config): def solve(self, **kwargs): """ Solve the routine optimization model. + args and kwargs go to `self.om.prob.solve()` (`cvxpy.Problem.solve()`). """ return self.om.prob.solve(**kwargs) - def run(self, no_code=True, **kwargs): + def run(self, **kwargs): """ Run the routine. + Following kwargs go to `self.init()`: `force`, `force_mats`, `force_constr`, `force_om`. 
+ + Following kwargs go to `self.solve()`: `solver`, `verbose`, `gp`, `qcp`, `requires_grad`, + `enforce_dpp`, `ignore_dpp`, `method`, and all rest. + Parameters ---------- - no_code : bool, optional - If True, print the generated CVXPY code. Defaults to False. - - Other Parameters - ---------------- + force : bool, optional + If True, force re-initialization. Defaults to False. + force_mats : bool, optional + If True, force re-generating matrices. Defaults to False. + force_constr : bool, optional + Whether to turn on all constraints. + force_om : bool, optional + If True, force re-generating optimization model. Defaults to False. solver: str, optional The solver to use. For example, 'GUROBI', 'ECOS', 'SCS', or 'OSQP'. verbose : bool, optional @@ -259,10 +282,8 @@ def run(self, no_code=True, **kwargs): When True, DPP problems will be treated as non-DPP, which may speed up compilation. Defaults to False. method : function, optional A custom solve method to use. - kwargs : keywords, optional - Additional solver specific arguments. See CVXPY documentation for details. 
""" - return RoutineBase.run(self, no_code=no_code, **kwargs) + return super().run(**kwargs) def _post_solve(self): """ @@ -297,9 +318,9 @@ def unpack(self, **kwargs): pass # NOTE: only unpack the variables that are in the model or group try: - var.owner.set(src=var.src, attr='v', idx=idx, value=var.v) - # failed to find source var in the owner (model or group) + var.owner.set(src=var.src, idx=idx, attr='v', value=var.v) except (KeyError, TypeError): + logger.error(f'Failed to unpack <{var}> to <{var.owner.class_name}>.') pass # label the most recent solved routine @@ -325,16 +346,16 @@ def dc2ac(self, kloss=1.0, **kwargs): pq_idx = self.system.StaticLoad.get_idx() pd0 = self.system.StaticLoad.get(src='p0', attr='v', idx=pq_idx).copy() qd0 = self.system.StaticLoad.get(src='q0', attr='v', idx=pq_idx).copy() - self.system.StaticLoad.set(src='p0', attr='v', idx=pq_idx, value=pd0 * kloss) - self.system.StaticLoad.set(src='q0', attr='v', idx=pq_idx, value=qd0 * kloss) + self.system.StaticLoad.set(src='p0', idx=pq_idx, attr='v', value=pd0 * kloss) + self.system.StaticLoad.set(src='q0', idx=pq_idx, attr='v', value=qd0 * kloss) ACOPF = self.system.ACOPF # run ACOPF ACOPF.run() # self.exec_time += ACOPF.exec_time # scale load back - self.system.StaticLoad.set(src='p0', attr='v', idx=pq_idx, value=pd0) - self.system.StaticLoad.set(src='q0', attr='v', idx=pq_idx, value=qd0) + self.system.StaticLoad.set(src='p0', idx=pq_idx, value=pd0) + self.system.StaticLoad.set(src='q0', idx=pq_idx, value=qd0) if not ACOPF.exit_code == 0: logger.warning(' did not converge, conversion failed.') # NOTE: mock results to fit interface with ANDES diff --git a/ams/routines/dcpf.py b/ams/routines/dcpf.py index ff0e89de..d0f84735 100644 --- a/ams/routines/dcpf.py +++ b/ams/routines/dcpf.py @@ -127,10 +127,10 @@ def solve(self, method=None): res, sstats = runpf(casedata=ppc, ppopt=ppopt) return res, sstats - def run(self, force_init=False, no_code=True, - method=None, **kwargs): + def run(self, 
**kwargs): """ Run DC power flow. + *args and **kwargs go to `self.solve()`, which are not used yet. Examples -------- @@ -139,10 +139,6 @@ def run(self, force_init=False, no_code=True, Parameters ---------- - force_init : bool - Force initialization. - no_code : bool - Disable showing code. method : str Placeholder for future use. @@ -152,9 +148,10 @@ Exit code of the routine. """ if not self.initialized: - self.init(force=force_init, no_code=no_code) + self.init() t0, _ = elapsed() - res, sstats = self.solve(method=method) + + res, sstats = self.solve(**kwargs) self.converged = res['success'] self.exit_code = 0 if res['success'] else 1 _, s = elapsed(t0) @@ -167,8 +164,8 @@ logger.info(msg) try: self.unpack(res) - except Exception: - logger.warning(f"Failed to unpack results from {self.class_name}.") + except Exception as e: + logger.error(f"Failed to unpack results from {self.class_name}.\n{e}") return False return True else: diff --git a/ams/routines/ed.py b/ams/routines/ed.py index 02f17c1a..24b4b989 100644 --- a/ams/routines/ed.py +++ b/ams/routines/ed.py @@ -2,7 +2,6 @@ Economic dispatch routines. 
""" import logging -from collections import OrderedDict import numpy as np from ams.core.param import RParam @@ -92,15 +91,10 @@ def __init__(self) -> None: name='RR30', tex_name=r'R_{30,R}', info='Repeated ramp rate', no_parse=True,) - self.ctrl.expand_dims = 1 - self.c0.expand_dims = 1 - self.pmax.expand_dims = 1 - self.pmin.expand_dims = 1 - self.pg0.expand_dims = 1 - self.rate_a.expand_dims = 1 - self.Pfinj.expand_dims = 1 - self.Pbusinj.expand_dims = 1 - self.gsh.expand_dims = 1 + items_to_expand = ['ctrl', 'c0', 'pmax', 'pmin', 'pg0', 'rate_a', + 'Pfinj', 'Pbusinj', 'gsh', 'ul'] + for item in items_to_expand: + self.__dict__[item].expand_dims = 1 # NOTE: extend pg to 2D matrix: row for gen and col for timeslot self.pg.horizon = self.timeslot @@ -120,13 +114,15 @@ class ED(RTED, MPBase, SRBase): - Vars ``pg``, ``pru``, ``prd`` are extended to 2D - 2D Vars ``rgu`` and ``rgd`` are introduced - - Param ``ug`` is sourced from ``EDTSlot.ug`` as commitment decisions + - Param ``ug`` is sourced from ``EDTSlot.ug`` as generator commitment Notes ----- 1. Formulations has been adjusted with interval ``config.t`` 2. The tie-line flow is not implemented in this model. + + 3. `EDTSlot.ug` is used instead of `StaticGen.u` for generator commitment. 
""" def __init__(self, system, config): @@ -134,8 +130,7 @@ def __init__(self, system, config): MPBase.__init__(self) SRBase.__init__(self) - self.config.add(OrderedDict((('t', 1), - ))) + self.config.t = 1 # scheduling interval in hour self.config.add_extra("_help", t="time interval in hours", ) @@ -182,8 +177,8 @@ def __init__(self, system, config): # --- line --- self.plf.horizon = self.timeslot self.plf.info = '2D Line flow' - self.plflb.e_str = '-Bf@aBus - Pfinj@tlv - rate_a@tlv' - self.plfub.e_str = 'Bf@aBus + Pfinj@tlv - rate_a@tlv' + self.plflb.e_str = '-Bf@aBus - Pfinj@tlv - mul(ul, rate_a)@tlv' + self.plfub.e_str = 'Bf@aBus + Pfinj@tlv - mul(ul, rate_a)@tlv' self.alflb.e_str = '-CftT@aBus + amin@tlv' self.alfub.e_str = 'CftT@aBus - amax@tlv' @@ -196,19 +191,19 @@ def __init__(self, system, config): self.rbu.e_str = 'gs@mul(ugt, pru) - mul(dud, tlv)' self.rbd.e_str = 'gs@mul(ugt, prd) - mul(ddd, tlv)' - self.rru.e_str = 'mul(ugt, pg + pru) - mul(pmax, tlv)' - self.rrd.e_str = 'mul(ugt, -pg + prd) + mul(pmin, tlv)' + self.rru.e_str = 'pg + pru - mul(mul(ugt, pmax), tlv)' + self.rrd.e_str = '-pg + prd + mul(mul(ugt, pmin), tlv)' self.rgu.e_str = 'pg @ Mr - t dot RR30' self.rgd.e_str = '-pg @ Mr - t dot RR30' self.rgu0 = Constraint(name='rgu0', info='Initial gen ramping up', - e_str='pg[:, 0] - pg0[:, 0] - R30', + e_str='mul(ugt[:, 0], pg[:, 0] - pg0[:, 0] - R30)', is_eq=False,) self.rgd0 = Constraint(name='rgd0', info='Initial gen ramping down', - e_str='- pg[:, 0] + pg0[:, 0] - R30', + e_str='mul(ugt[:, 0], -pg[:, 0] + pg0[:, 0] - R30)', is_eq=False,) # --- objective --- @@ -247,8 +242,6 @@ def __init__(self, system, config): ED.__init__(self, system, config) DGBase.__init__(self) - self.config.t = 1 # scheduling interval in hour - self.info = 'Economic dispatch with distributed generation' self.type = 'DCED' @@ -301,8 +294,6 @@ def __init__(self, system, config): ED.__init__(self, system, config) ESD1MPBase.__init__(self) - self.config.t = 1 # scheduling 
interval in hour - self.info = 'Economic dispatch with energy storage' self.type = 'DCED' diff --git a/ams/routines/pflow.py b/ams/routines/pflow.py index 057b8c92..68ec9bd2 100644 --- a/ams/routines/pflow.py +++ b/ams/routines/pflow.py @@ -59,7 +59,7 @@ def __init__(self, system, config): model="StaticGen", src="q",) # NOTE: omit AC power flow formulation here - def solve(self, method="newton"): + def solve(self, method="newton", **kwargs): """ Solve the AC power flow using PYPOWER. """ @@ -73,12 +73,12 @@ def solve(self, method="newton"): if alg is None: msg = f"Invalid method `{method}` for PFlow." raise ValueError(msg) - ppopt = ppoption(PF_ALG=alg, ENFORCE_Q_LIMS=self.config.qlim) + ppopt = ppoption(PF_ALG=alg, ENFORCE_Q_LIMS=self.config.qlim, **kwargs) res, sstats = runpf(casedata=ppc, ppopt=ppopt) return res, sstats - def run(self, force_init=False, no_code=True, method="newton", **kwargs): + def run(self, **kwargs): """ Run AC power flow using PYPOWER. @@ -108,6 +108,4 @@ def run(self, force_init=False, no_code=True, method="newton", **kwargs): exit_code : int Exit code of the routine. """ - return super().run(force_init=force_init, - no_code=no_code, method=method, - **kwargs,) + return super().run(**kwargs,) diff --git a/ams/routines/routine.py b/ams/routines/routine.py index d09895ef..60b04785 100644 --- a/ams/routines/routine.py +++ b/ams/routines/routine.py @@ -10,7 +10,6 @@ import numpy as np from andes.core import Config -from andes.shared import pd from andes.utils.misc import elapsed from ams.core.param import RParam @@ -19,6 +18,7 @@ from ams.core.service import RBaseService, ValueService from ams.opt.omodel import OModel, Param, Var, Constraint, Objective, ExpressionCalc +from ams.shared import pd logger = logging.getLogger(__name__) @@ -26,9 +26,68 @@ class RoutineBase: """ Class to hold descriptive routine models and data mapping. + + Attributes + ---------- + system : Optional[Type] + The system object associated with the routine. 
+ config : Config + Configuration object for the routine. + info : Optional[str] + Information about the routine. + tex_names : OrderedDict + LaTeX names for the routine parameters. + syms : SymProcessor + Symbolic processor for the routine. + _syms : bool + Flag indicating whether symbols have been generated. + rparams : OrderedDict + Registry for RParam objects. + services : OrderedDict + Registry for service objects. + params : OrderedDict + Registry for Param objects. + vars : OrderedDict + Registry for Var objects. + constrs : OrderedDict + Registry for Constraint objects. + exprs : OrderedDict + Registry for Expression objects. + obj : Optional[Objective] + Objective of the routine. + initialized : bool + Flag indicating whether the routine has been initialized. + type : str + Type of the routine. + docum : RDocumenter + Documentation generator for the routine. + map1 : OrderedDict + Mapping from ANDES. + map2 : OrderedDict + Mapping to ANDES. + om : OModel + Optimization model for the routine. + exec_time : float + Execution time of the routine. + exit_code : int + Exit code of the routine. + converged : bool + Flag indicating whether the routine has converged. + converted : bool + Flag indicating whether AC conversion has been performed. """ def __init__(self, system=None, config=None): + """ + Initialize the routine. + + Parameters + ---------- + system : Optional[Type] + The system object associated with the routine. + config : Optional[dict] + Configuration dictionary for the routine. + """ self.system = system self.config = Config(self.class_name) self.info = None @@ -213,6 +272,7 @@ def _data_check(self, info=True): info: bool Whether to print warning messages. 
""" + logger.debug(f"Entering data check for <{self.class_name}>") no_input = [] owner_list = [] for rname, rparam in self.rparams.items(): @@ -230,28 +290,33 @@ def _data_check(self, info=True): if len(no_input) > 0: if info: msg = f"Following models are missing in input: {set(owner_list)}" - logger.warning(msg) + logger.error(msg) return False # TODO: add data validation for RParam, typical range, etc. + logger.debug(" -> Data check passed") return True - def init(self, force=False, no_code=True, **kwargs): + def init(self, **kwargs): """ Initialize the routine. - Force initialization (`force=True`) will do the following: - - Rebuild the system matrices - - Enable all constraints - - Reinitialize the optimization model - - Parameters - ---------- + Other parameters + ---------------- force: bool - Whether to force initialization. - no_code: bool - Whether to show generated code. + Whether to force initialization regardless of the current initialization status. + force_mats: bool + Whether to force build the system matrices, goes to `self.system.mats.build()`. + force_constr: bool + Whether to turn on all constraints. + force_om: bool + Whether to force initialize the optimization model. """ - skip_all = (not force) and self.initialized and self.om.initialized + force = kwargs.pop('force', False) + force_mats = kwargs.pop('force_mats', False) + force_constr = kwargs.pop('force_constr', False) + force_om = kwargs.pop('force_om', False) + + skip_all = not (force and force_mats) and self.initialized if skip_all: logger.debug(f"{self.class_name} has already been initialized.") @@ -259,30 +324,25 @@ def init(self, force=False, no_code=True, **kwargs): t0, _ = elapsed() # --- data check --- - if self._data_check(): - logger.debug(f"{self.class_name} data check passed.") - else: - msg = f"{self.class_name} data check failed, setup may run into error!" 
- logger.warning(msg) + self._data_check() - # --- force initialization --- - if force: - self.system.mats.build() + # --- turn on all constrs --- + if force_constr: for constr in self.constrs.values(): constr.is_disabled = False # --- matrix build --- - if not self.system.mats.initialized: - self.system.mats.build() + self.system.mats.build(force=force_mats) # --- constraint check --- _ = self._get_off_constrs() - om_init = self.om.init(no_code=no_code) + if not self.om.initialized: + self.om.init(force=force_om) _, s_init = elapsed(t0) msg = f"<{self.class_name}> " - if om_init: + if self.om.initialized: msg += f"initialized in {s_init}." self.initialized = True else: @@ -295,23 +355,24 @@ def solve(self, **kwargs): """ Solve the routine optimization model. """ - return True + raise NotImplementedError def unpack(self, **kwargs): """ Unpack the results. """ - return None + raise NotImplementedError def _post_solve(self): """ Post-solve calculation. """ - return None + raise NotImplementedError - def run(self, force_init=False, no_code=True, **kwargs): + def run(self, **kwargs): """ Run the routine. + args and kwargs go to `self.solve()`. Force initialization (`force_init=True`) will do the following: - Rebuild the system matrices @@ -321,12 +382,22 @@ def run(self, force_init=False, no_code=True, **kwargs): Parameters ---------- force_init: bool - Whether to force initialization. - no_code: bool - Whether to show generated code. + Whether to force re-initialize the routine. + force_mats: bool + Whether to force build the system matrices. + force_constr: bool + Whether to turn on all constraints. + force_om: bool + Whether to force initialize the OModel. 
""" # --- setup check --- - self.init(force=force_init, no_code=no_code) + force_init = kwargs.pop('force_init', False) + force_mats = kwargs.pop('force_mats', False) + force_constr = kwargs.pop('force_constr', False) + force_om = kwargs.pop('force_om', False) + self.init(force=force_init, force_mats=force_mats, + force_constr=force_constr, force_om=force_om) + # --- solve optimization --- t0, _ = elapsed() _ = self.solve(**kwargs) @@ -514,7 +585,7 @@ def update(self, params=None, build_mats=True,): if no system matrices are changed. """ t0, _ = elapsed() - re_init = False + re_finalize = False # sanitize input sparams = [] if params is None: @@ -528,16 +599,19 @@ def update(self, params=None, build_mats=True,): sparams = [self.params[param] for param in params if isinstance(param, str)] for param in sparams: param.update() + for param in sparams: if param.optz is None: # means no_parse=True - re_init = True + re_finalize = True break - if build_mats: - self.system.mats.build() - if re_init: + + self.system.mats.build(force=build_mats) + + if re_finalize: logger.warning(f"<{self.class_name}> reinit OModel due to non-parametric change.") - self.om.parsed = False - _ = self.om.init(no_code=True) + self.om.evaluate(force=True) + self.om.finalize(force=True) + results = self.om.update(params=sparams) t0, s0 = elapsed(t0) logger.debug(f"Update params in {s0}.") diff --git a/ams/routines/rted.py b/ams/routines/rted.py index 0b156388..c04ce86f 100644 --- a/ams/routines/rted.py +++ b/ams/routines/rted.py @@ -199,20 +199,20 @@ def dc2ac(self, kloss=1.0, **kwargs): pq_idx = self.system.StaticLoad.get_idx() pd0 = self.system.StaticLoad.get(src='p0', attr='v', idx=pq_idx).copy() qd0 = self.system.StaticLoad.get(src='q0', attr='v', idx=pq_idx).copy() - self.system.StaticLoad.set(src='p0', attr='v', idx=pq_idx, value=pd0 * kloss) - self.system.StaticLoad.set(src='q0', attr='v', idx=pq_idx, value=qd0 * kloss) + self.system.StaticLoad.set(src='p0', idx=pq_idx, attr='v', value=pd0 
* kloss) + self.system.StaticLoad.set(src='q0', idx=pq_idx, attr='v', value=qd0 * kloss) # preserve generator reserve ACOPF = self.system.ACOPF pmin = pmin0 + self.prd.v pmax = pmax0 - self.pru.v - self.system.StaticGen.set(src='pmin', attr='v', idx=pr_idx, value=pmin) - self.system.StaticGen.set(src='pmax', attr='v', idx=pr_idx, value=pmax) - self.system.StaticGen.set(src='p0', attr='v', idx=pr_idx, value=self.pg.v) + self.system.StaticGen.set(src='pmin', idx=pr_idx, attr='v', value=pmin) + self.system.StaticGen.set(src='pmax', idx=pr_idx, attr='v', value=pmax) + self.system.StaticGen.set(src='p0', idx=pr_idx, attr='v', value=self.pg.v) # run ACOPF ACOPF.run() # scale load back - self.system.StaticLoad.set(src='p0', attr='v', idx=pq_idx, value=pd0) - self.system.StaticLoad.set(src='q0', attr='v', idx=pq_idx, value=qd0) + self.system.StaticLoad.set(src='p0', idx=pq_idx, attr='v', value=pd0) + self.system.StaticLoad.set(src='q0', idx=pq_idx, attr='v', value=qd0) if not ACOPF.exit_code == 0: logger.warning(' did not converge, conversion failed.') # NOTE: mock results to fit interface with ANDES @@ -227,14 +227,15 @@ def dc2ac(self, kloss=1.0, **kwargs): info='Bus voltage', unit='p.u.', model='Bus', src='v',) self.vBus.parse() + self.vBus.evaluate() self.vBus.optz.value = ACOPF.vBus.v self.aBus.optz.value = ACOPF.aBus.v self.exec_time = exec_time # reset pmin, pmax, p0 - self.system.StaticGen.set(src='pmin', attr='v', idx=pr_idx, value=pmin0) - self.system.StaticGen.set(src='pmax', attr='v', idx=pr_idx, value=pmax0) - self.system.StaticGen.set(src='p0', attr='v', idx=pr_idx, value=p00) + self.system.StaticGen.set(src='pmin', idx=pr_idx, attr='v', value=pmin0) + self.system.StaticGen.set(src='pmax', idx=pr_idx, attr='v', value=pmax0) + self.system.StaticGen.set(src='p0', idx=pr_idx, attr='v', value=p00) # --- set status --- self.system.recent = self diff --git a/ams/routines/uc.py b/ams/routines/uc.py index ba5735d4..f1b29827 100644 --- a/ams/routines/uc.py +++ 
b/ams/routines/uc.py @@ -304,13 +304,16 @@ def _initial_guess(self): if len(g_idx) == 0: g_idx = priority[0] ug0 = 0 - self.system.StaticGen.set(src='u', attr='v', idx=g_idx, value=ug0) - logger.warning(f'Turn off StaticGen {g_idx} as initial commitment guess.') - return True + off_gen = f'{g_idx}' + else: + off_gen = ', '.join(g_idx) + self.system.StaticGen.set(src='u', idx=g_idx, attr='v', value=ug0) + logger.warning(f"As initial commitment guess, turn off StaticGen: {off_gen}") + return g_idx - def init(self, force=False, no_code=True, **kwargs): + def init(self, **kwargs): self._initial_guess() - return super().init(force=force, no_code=no_code, **kwargs) + return super().init(**kwargs) def dc2ac(self, **kwargs): """ diff --git a/ams/shared.py b/ams/shared.py index a9ec87e2..afe3c92e 100644 --- a/ams/shared.py +++ b/ams/shared.py @@ -4,35 +4,45 @@ This module is supplementary to the ``andes.shared`` module. """ import logging +import unittest from functools import wraps from datetime import datetime from collections import OrderedDict import cvxpy as cp -from andes.shared import pd from andes.utils.lazyimport import LazyImport +from andes.system import System as adSystem + + logger = logging.getLogger(__name__) sps = LazyImport('import scipy.sparse as sps') np = LazyImport('import numpy as np') +pd = LazyImport('import pandas as pd') + +# --- an empty ANDES system --- +empty_adsys = adSystem() +ad_models = list(empty_adsys.models.keys()) # --- NumPy constants --- # NOTE: In NumPy 2.0, np.Inf and np.NaN are deprecated. 
inf = np.inf nan = np.nan -# NOTE: copied from CVXPY documentation -MIP_SOLVERS = ['CBC', 'COPT', 'GLPK_MI', 'CPLEX', 'GUROBI', - 'MOSEK', 'SCIP', 'XPRESS', 'SCIPY'] - -INSTALLED_SOLVERS = cp.installed_solvers() - # NOTE: copyright year_end = datetime.now().year copyright_msg = f'Copyright (C) 2023-{year_end} Jinning Wang' +# NOTE: copied from CVXPY documentation, last checked on 2024/10/30, v1.5 +mip_solvers = ['CBC', 'COPT', 'GLPK_MI', 'CPLEX', 'GUROBI', + 'MOSEK', 'SCIP', 'XPRESS', 'SCIPY'] + +installed_solvers = cp.installed_solvers() + +installed_mip_solvers = [s for s in installed_solvers if s in mip_solvers] + def require_MIP_solver(f): """ @@ -41,13 +51,26 @@ def require_MIP_solver(f): @wraps(f) def wrapper(*args, **kwargs): - if not any(s in MIP_SOLVERS for s in INSTALLED_SOLVERS): + if not any(s in mip_solvers for s in installed_solvers): raise ModuleNotFoundError("No MIP solver is available.") return f(*args, **kwargs) return wrapper +def skip_unittest_without_MIP(f): + """ + Decorator for skipping tests that require MIP solver. 
+ """ + def wrapper(*args, **kwargs): + if any(s in mip_solvers for s in installed_solvers): + pass + else: + raise unittest.SkipTest("No MIP solver is available.") + return f(*args, **kwargs) + return wrapper + + ppc_cols = OrderedDict([ ('bus', ['bus_i', 'type', 'pd', 'qd', 'gs', 'bs', 'area', 'vm', 'va', 'baseKV', 'zone', 'vmax', 'vmin', 'lam_p', 'lam_q', diff --git a/ams/system.py b/ams/system.py index ba243b9c..f14603b9 100644 --- a/ams/system.py +++ b/ams/system.py @@ -17,14 +17,14 @@ from andes.utils.misc import elapsed from andes.utils.tab import Tab -import ams.io +import ams from ams.models.group import GroupBase from ams.routines.type import TypeBase from ams.models import file_classes from ams.routines import all_routines from ams.utils.paths import get_config_path from ams.core.matprocessor import MatProcessor -from ams.interop.andes import to_andes +from ams.interface import to_andes from ams.report import Report logger = logging.getLogger(__name__) @@ -436,10 +436,10 @@ def setup(self): # assign bus type as placeholder; 1=PQ, 2=PV, 3=ref, 4=isolated if self.Bus.type.v.sum() == self.Bus.n: # if all type are PQ - self.Bus.set(src='type', attr='v', idx=self.PV.bus.v, - value=np.ones(self.PV.n)) - self.Bus.set(src='type', attr='v', idx=self.Slack.bus.v, - value=np.ones(self.Slack.n)) + self.Bus.alter(src='type', idx=self.PV.bus.v, + value=np.ones(self.PV.n)) + self.Bus.alter(src='type', idx=self.Slack.bus.v, + value=np.ones(self.Slack.n)) # --- assign column and row names --- self.mats.Cft.col_names = self.Line.idx.v diff --git a/ams/utils/__init__.py b/ams/utils/__init__.py index e4a4f908..a6877af8 100644 --- a/ams/utils/__init__.py +++ b/ams/utils/__init__.py @@ -11,3 +11,49 @@ def wrapper(*args, **kwargs): _, s = elapsed(t0) return result, s return wrapper + + +def create_entry(*fields, three_params=True): + """ + Helper function to create a list of fields for a model entry. 
+ + Parameters + ---------- + fields : tuple + Additional fields to include in the list. + three_params : bool, optional + Whether to include 'idx', 'u', 'name' in the list (default is True). + + Returns + ------- + list + A list of fields for the model entry. + """ + base_fields = ['idx', 'u', 'name'] if three_params else [] + return base_fields + list(fields) + + +def pretty_long_message(message, prefix="", max_length=80): + """ + Pretty print a long message. + + Parameters + ---------- + message : str + The message to format. + prefix : str, optional + A prefix to add to each line of the message. + max_length : int, optional + The maximum length of each line. + + Returns + ------- + str + The formatted message. + """ + if len(message) <= max_length: + return message + else: + lines = [message[i:i+max_length] for i in range(0, len(message), max_length)] + formatted_message = lines[0] + "\n" + "\n".join([prefix + line for line in lines[1:]]) + return formatted_message diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 1c81cb90..23724202 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -88,6 +88,6 @@ jobs: conda install -c conda-forge kvxopt python -m pip install --upgrade pip pip install pytest pytest-azurepipelines - pip install .[dev,interop] + pip install .[dev] pytest displayName: pytest \ No newline at end of file diff --git a/docs/source/api.rst b/docs/source/api.rst index 9fabf3cd..ba11937c 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -44,6 +44,8 @@ Routines :template: autosummary/module_toctree.rst ams.routines.routine + ams.routines.dcopf + ams.routines.pflow Optimization @@ -56,7 +58,7 @@ Optimization :caption: Optimization :template: autosummary/module_toctree.rst - ams.opt.omodel + ams.opt I/O @@ -82,7 +84,7 @@ Interoperability :caption: Interoperability :template: autosummary/module_toctree.rst - ams.interop + ams.interface Others diff --git a/docs/source/conf.py b/docs/source/conf.py index e99a8545..b332c5c9 
100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -164,7 +164,7 @@ # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'ams', 'AMS Manual', - author, 'ams', 'Python Software for Scheduling Modeling and Co-Simulation with Dynanic', + author, 'ams', 'Python Software for Scheduling Modeling and Co-Simulation with Dynamics', 'Miscellaneous'), ] diff --git a/docs/source/getting_started/install.rst b/docs/source/getting_started/install.rst index 7c02f449..21f6d138 100644 --- a/docs/source/getting_started/install.rst +++ b/docs/source/getting_started/install.rst @@ -172,8 +172,8 @@ Note the dot at the end. Pip will take care of the rest. .. note:: To install extra support packages, one can append ``[NAME_OF_EXTRA]`` to - ``pip install -e .``. For example, ``pip install -e .[interop]`` will - install packages to support interoperability when installing AMS in the + ``pip install -e .``. For example, ``pip install -e .[doc]`` will + install packages to support documentation when installing AMS in the development, editable mode. Updating AMS diff --git a/docs/source/modeling/example.rst b/docs/source/modeling/example.rst index a774da7c..c804d801 100644 --- a/docs/source/modeling/example.rst +++ b/docs/source/modeling/example.rst @@ -104,7 +104,7 @@ Model Section # --- power balance --- pb = 'Bbus@aBus + Pbusinj + Cl@pd + Csh@gsh - Cg@pg' self.pb = Constraint(name='pb', info='power balance', - e_str=pb, type='eq',) + e_str=pb, is_eq=True,) # --- line flow --- self.plf = Var(info='Line flow', unit='p.u.', diff --git a/docs/source/modeling/routine.rst b/docs/source/modeling/routine.rst index 7fcb736e..a257bebe 100644 --- a/docs/source/modeling/routine.rst +++ b/docs/source/modeling/routine.rst @@ -24,7 +24,7 @@ A simplified code snippet for RTED is shown below as an example. info='Sum Gen vars vector in shape of zone', no_parse=True, sparse=True) ... ...
- self.rbu = Constraint(name='rbu', type='eq', + self.rbu = Constraint(name='rbu', is_eq=True, info='RegUp reserve balance', e_str = 'gs @ mul(ug, pru) - dud') ... ... @@ -58,13 +58,6 @@ Further, to facilitate the routine definition, AMS developed a class .. autoclass:: ams.core.param.RParam :noindex: -.. currentmodule:: ams.routines -.. autosummary:: - :recursive: - :toctree: _generated - - RoutineBase - Numerical Optimization ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -87,7 +80,7 @@ Interoperation with ANDES ----------------------------------- The interoperation with dynamic simulator invovles both file conversion and data exchange. -In AMS, the built-in interface with ANDES is implemented in :py:mod:`ams.interop.andes`. +In AMS, the built-in interface with ANDES is implemented in :py:mod:`ams.interface`. File Format Converter @@ -98,7 +91,7 @@ where it defines grid topology and power flow. An AMS case can be converted to an ANDES case, with the option to supply additional dynamic data. -.. autofunction:: ams.interop.andes.to_andes +.. autofunction:: ams.interface.to_andes :noindex: @@ -112,10 +105,10 @@ The maping relationship for a specific routine is defined in the routine class a ``map2``. Additionally, a link table for the ANDES case is used for the controller connections. -Module :py:mod:`ams.interop.andes.Dynamic`, contains the necessary functions and classes for +Module :py:mod:`ams.interface.Dynamic`, contains the necessary functions and classes for file conversion and data exchange. -.. autoclass:: ams.interop.andes.Dynamic +.. autoclass:: ams.interface.Dynamic :noindex: :members: send, receive diff --git a/docs/source/release-notes.rst b/docs/source/release-notes.rst index e90f6c22..8bcdb1fa 100644 --- a/docs/source/release-notes.rst +++ b/docs/source/release-notes.rst @@ -9,12 +9,32 @@ The APIs before v3.0.0 are in beta and may change without prior notice. 
Pre-v1.0.0 ========== +v0.9.11 (2024-11-14) +-------------------- + +- Add pyproject.toml for PEP 517 and PEP 518 compliance +- Add model `Jumper` +- Fix deprecation warning related to `pandas.fillna` and `newshape` in NumPy +- Minor refactor on solvers information in the module `shared` +- Change default values of minimum ON/OFF duration time of generators to be 1 and 0.5 hours +- Add parameter `uf` for enforced generator on/off status +- In service `LoadScale`, consider load online status +- Consider line online status in routine `ED` +- Add methods `evaluate` and `finalize` in the class `OModel` to handle optimization elements generation and assembling +- Refactor `OModel.init()` and `Routine.init()` +- Add ANDES paper as a citation file for now +- Add more routine tests for generator trip, line trip, and load trip +- Add a README to overview built-in cases +- Rename methods `v2` as `e` for classes `Constraint` and `Objective` +- Add benchmark functions +- Improve usage of `eval()` in module `omodel` +- Refactor module `interop.andes` as module `interface` for simplicity + v0.9.10 (2024-09-03) -------------------- Hotfix of import issue in ``v0.9.9``.
-Features developed in ``v0.9.9``: - In module `MatProcessor`, add two parameters `permc_spec` and `use_umfpack` in function `build_ptdf` - Follow RTD's deprecation of Sphinx context injection at build time - In MATPOWER conversion, set devices name as None diff --git a/examples/demonstration/demo_AGC.ipynb b/examples/demonstration/demo_AGC.ipynb index e7952732..96f480ff 100644 --- a/examples/demonstration/demo_AGC.ipynb +++ b/examples/demonstration/demo_AGC.ipynb @@ -410,7 +410,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -450,8 +450,8 @@ " columns=out_cols)\n", "\n", "# --- AMS settings ---\n", - "sp.SFR.set(src='du', attr='v', idx=sp.SFR.idx.v, value=0.0015*np.ones(sp.SFR.n))\n", - "sp.SFR.set(src='dd', attr='v', idx=sp.SFR.idx.v, value=0.0015*np.ones(sp.SFR.n))\n", + "sp.SFR.set(src='du', idx=sp.SFR.idx.v, attr='v', value=0.0015*np.ones(sp.SFR.n))\n", + "sp.SFR.set(src='dd', idx=sp.SFR.idx.v, attr='v', value=0.0015*np.ones(sp.SFR.n))\n", "\n", "# --- ANDES settings ---\n", "sa.TDS.config.no_tqdm = True # turn off ANDES progress bar\n", @@ -464,13 +464,13 @@ "# NOTE: might run into error if there exists a TurbineGov model that does not have \"VMAX\"\n", "tbgov_src = [mdl.idx.v for mdl in sa.TurbineGov.models.values()]\n", "tbgov_idx = list(chain.from_iterable(tbgov_src))\n", - "sa.TurbineGov.set(src='VMAX', attr='v', idx=tbgov_idx,\n", + "sa.TurbineGov.set(src='VMAX', idx=tbgov_idx, attr='v',\n", " value=9999 * np.ones(sa.TurbineGov.n),)\n", - "sa.TurbineGov.set(src='VMIN', attr='v', idx=tbgov_idx,\n", + "sa.TurbineGov.set(src='VMIN', idx=tbgov_idx, attr='v',\n", " value=np.zeros(sa.TurbineGov.n),)\n", "syg_src = [mdl.idx.v for mdl in sa.SynGen.models.values()]\n", "syg_idx = list(chain.from_iterable(syg_src))\n", - "sa.SynGen.set(src='ra', attr='v', idx=syg_idx,\n", + "sa.SynGen.set(src='ra', idx=syg_idx, attr='v',\n", " value=np.zeros(sa.SynGen.n),)\n", "\n", "# use constant power model 
for PQ\n", @@ -538,7 +538,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -664,12 +664,8 @@ " # use 5-min average load in dispatch solution\n", " load_avg = load_coeff[t:t+RTED_interval].mean()\n", " # set load in to AMS\n", - " sp.PQ.set(src='p0', attr='v',\n", - " value=load_avg * p0_sp,\n", - " idx=pq_idx)\n", - " sp.PQ.set(src='q0', attr='v',\n", - " value=load_avg * q0_sp,\n", - " idx=pq_idx)\n", + " sp.PQ.set(src='p0', idx=pq_idx, attr='v', value=load_avg * p0_sp)\n", + " sp.PQ.set(src='q0', idx=pq_idx, attr='v', value=load_avg * q0_sp)\n", " print(f\"--AMS: update disaptch load with factor {load_avg:.6f}.\")\n", "\n", " # get dynamic generator output from TDS\n", @@ -707,23 +703,17 @@ "\n", " # set into governor, Exclude NaN values for governor index\n", " gov_to_set = {gov: pgov for gov, pgov in zip(maptab['gov_idx'], maptab['pgov']) if bool(gov)}\n", - " sa.TurbineGov.set(src='pref0', attr='v',\n", - " idx=list(gov_to_set.keys()),\n", - " value=list(gov_to_set.values()))\n", + " sa.TurbineGov.set(src='pref0', idx=list(gov_to_set.keys()), attr='v', value=list(gov_to_set.values()))\n", " print(f\"--ANDES: update TurbineGov reference.\")\n", "\n", " # set into dg, Exclude NaN values for dg index\n", " dg_to_set = {dg: pdg for dg, pdg in zip(maptab['dg_idx'], maptab['pdg']) if bool(dg)}\n", - " sa.DG.set(src='pref0', attr='v',\n", - " idx=list(dg_to_set.keys()),\n", - " value=list(dg_to_set.values()))\n", + " sa.DG.set(src='pref0', idx=list(dg_to_set.keys()), attr='v', value=list(dg_to_set.values()))\n", " print(f\"--ANDES: update DG reference.\")\n", "\n", " # set into rg, Exclude NaN values for rg index\n", " rg_to_set = {rg: prg for rg, prg in zip(maptab['rg_idx'], maptab['prg']) if bool(rg)}\n", - " sa.RenGen.set(src='Pref', attr='v',\n", - " idx=list(rg_to_set.keys()),\n", - " value=list(rg_to_set.values()))\n", + " sa.RenGen.set(src='Pref', idx=list(rg_to_set.keys()), attr='v', 
value=list(rg_to_set.values()))\n", " print(f\"--ANDES: update RenGen reference.\")\n", "\n", " # record dispatch data\n", @@ -755,30 +745,22 @@ "\n", " # set into governor, Exclude NaN values for governor index\n", " agov_to_set = {gov: agov for gov, agov in zip(maptab['gov_idx'], maptab['agov']) if bool(gov)}\n", - " sa.TurbineGov.set(src='paux0', attr='v',\n", - " idx=list(agov_to_set.keys()),\n", - " value=list(agov_to_set.values()))\n", + " sa.TurbineGov.set(src='paux0', idx=list(agov_to_set.keys()), attr='v', value=list(agov_to_set.values()))\n", "\n", " # set into dg, Exclude NaN values for dg index\n", " adg_to_set = {dg: adg for dg, adg in zip(maptab['dg_idx'], maptab['adg']) if bool(dg)}\n", - " sa.DG.set(src='Pext0', attr='v',\n", - " idx=list(adg_to_set.keys()),\n", - " value=list(adg_to_set.values()))\n", + " sa.DG.set(src='Pext0', idx=list(adg_to_set.keys()), attr='v', value=list(adg_to_set.values()))\n", "\n", " # set into rg, Exclude NaN values for rg index\n", " arg_to_set = {rg: arg + prg for rg, arg,\n", " prg in zip(maptab['rg_idx'], maptab['arg'], maptab['prg']) if bool(rg)}\n", - " sa.RenGen.set(src='Pref', attr='v',\n", - " idx=list(arg_to_set.keys()),\n", - " value=list(arg_to_set.values()))\n", + " sa.RenGen.set(src='Pref', idx=list(arg_to_set.keys()), attr='v', value=list(arg_to_set.values()))\n", "\n", " # --- TDS interval ---\n", " if t > 0: # --- run TDS ---\n", " # set laod into PQ.Ppf and PQ.Qpf\n", - " sa.PQ.set(src='Ppf', attr='v', idx=pq_idx,\n", - " value=load_coeff[t] * p0_sa)\n", - " sa.PQ.set(src='Qpf', attr='v', idx=pq_idx,\n", - " value=load_coeff[t] * q0_sa)\n", + " sa.PQ.set(src='Ppf', idx=pq_idx, attr='v', value=load_coeff[t] * p0_sa)\n", + " sa.PQ.set(src='Qpf', idx=pq_idx, attr='v', value=load_coeff[t] * q0_sa)\n", " sa.TDS.config.tf = t\n", " sa.TDS.run()\n", " # Update AGC PI controller\n", @@ -803,22 +785,16 @@ " break\n", " else: # --- init TDS ---\n", " # set pg to StaticGen.p0\n", - " sa.StaticGen.set(src='p0', 
attr='v', value=sp.RTED.pg.v,\n", - " idx=sp.RTED.pg.get_idx())\n", + " sa.StaticGen.set(src='p0', idx=sp.RTED.pg.get_idx(), attr='v', value=sp.RTED.pg.v)\n", " # set Bus.v to StaticGen.v\n", " bus_stg = sp.StaticGen.get(src='bus', attr='v', idx=sp.StaticGen.get_idx())\n", " v_stg = sp.Bus.get(src='v', attr='v', idx=bus_stg)\n", - " sa.StaticGen.set(src='v0', attr='v', value=v_stg,\n", - " idx=sp.StaticGen.get_idx())\n", + " sa.StaticGen.set(src='v0', idx=sp.StaticGen.get_idx(), attr='v', value=v_stg)\n", " # set vBus to Bus\n", - " sa.Bus.set(src='v0', attr='v',\n", - " value=sp.RTED.vBus.v,\n", - " idx=sp.RTED.vBus.get_idx())\n", + " sa.Bus.set(src='v0', idx=sp.RTED.vBus.get_idx(), attr='v', value=sp.RTED.vBus.v)\n", " # set load into PQ.p0 and PQ.q0\n", - " sa.PQ.set(src='p0', attr='v', idx=pq_idx,\n", - " value=load_coeff[t] * p0_sa)\n", - " sa.PQ.set(src='q0', attr='v', idx=pq_idx,\n", - " value=load_coeff[t] * q0_sa)\n", + " sa.PQ.set(src='p0', idx=pq_idx, attr='v', value=load_coeff[t] * p0_sa)\n", + " sa.PQ.set(src='q0', idx=pq_idx, attr='v', value=load_coeff[t] * q0_sa)\n", " sa.PFlow.run() # run power flow\n", " sa.TDS.init() # initialize TDS\n", "\n", diff --git a/examples/ex1.ipynb b/examples/ex1.ipynb index b590e8a7..b114b680 100644 --- a/examples/ex1.ipynb +++ b/examples/ex1.ipynb @@ -422,13 +422,13 @@ "Then, one can solve it by calling ``run()``.\n", "Here, argument `solver` can be passed to specify the solver to use, such as `solver='ECOS'`.\n", "\n", - "Installed solvers can be listed by ``ams.shared.INSTALLED_SOLVERS``,\n", + "Installed solvers can be listed by ``ams.shared.installed_solvers``,\n", "and more detailes of solver can be found at [CVXPY-Choosing a solver](https://www.cvxpy.org/tutorial/advanced/index.html#choosing-a-solver)." 
] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -443,7 +443,7 @@ } ], "source": [ - "ams.shared.INSTALLED_SOLVERS" + "ams.shared.installed_solvers" ] }, { diff --git a/examples/ex2.ipynb b/examples/ex2.ipynb index bfa427c3..7979d9f5 100644 --- a/examples/ex2.ipynb +++ b/examples/ex2.ipynb @@ -353,14 +353,12 @@ "\n", "`set`: This method ***WILL NOT*** modify the input values from the case file that have not been converted to the system base. As a result, changes applied by this method ***WILL NOT*** affect the dumped case file.\n", "\n", - "`alter`: If the method operates on an input parameter, the new data ***should be in the same base as that in the input file***. This function will convert ``value`` to per unit in the system base whenever necessary. The values for storing the input data, i.e., the parameter's ``vin`` field, will be overwritten. As a result, altered values ***WILL BE*** reflected in the dumped case file.\n", - "\n", - "Besides, `Group` also has method `set` but has no `alter`." + "`alter`: If the method operates on an input parameter, the new data ***should be in the same base as that in the input file***. This function will convert ``value`` to per unit in the system base whenever necessary. The values for storing the input data, i.e., the parameter's ``vin`` field, will be overwritten. As a result, altered values ***WILL BE*** reflected in the dumped case file." ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -375,7 +373,7 @@ } ], "source": [ - "sp.PQ.set(src='p0', attr='v', idx=['PQ_1', 'PQ_2'], value=[3.2, 3.2])" + "sp.PQ.alter(src='p0', idx=['PQ_1', 'PQ_2'], value=[3.2, 3.2])" ] }, { @@ -753,7 +751,10 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We can see that there are three PV generators in the system." 
+ "We can see that there are three PV generators in the system.\n", + "\n", + "> **Warning:** in `MatProcessor`, `StaticGen` online status is NOT considered in its connectivity matrix `Cg`.\n", + "> The same applies for `PQ`, `Line`, and `Shunt`." ] }, { @@ -941,7 +942,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -956,7 +957,7 @@ } ], "source": [ - "sp.StaticGen.set(src='u', attr='v', idx='PV_1', value=0)" + "sp.StaticGen.set(src='u', idx='PV_1', attr='v', value=0)" ] }, { @@ -1374,7 +1375,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -1389,7 +1390,7 @@ } ], "source": [ - "sp.Line.set(src='u', attr='v', idx='Line_1', value=0)" + "sp.Line.alter(src='u', idx='Line_1', value=0)" ] }, { @@ -1540,7 +1541,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -1555,7 +1556,7 @@ } ], "source": [ - "spc.RTED.set(src='rate_a', attr='v', idx=['Line_3'], value=0.6)" + "spc.RTED.set(src='rate_a', idx=['Line_3'], attr='v', value=0.6)" ] }, { diff --git a/examples/ex5.ipynb b/examples/ex5.ipynb index b1ed6cf0..c55792f4 100644 --- a/examples/ex5.ipynb +++ b/examples/ex5.ipynb @@ -511,7 +511,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -526,8 +526,8 @@ } ], "source": [ - "sa.TGOV1.set(src='VMAX', attr='v', idx=sa.TGOV1.idx.v, value=100*np.ones(sa.TGOV1.n))\n", - "sa.TGOV1.set(src='VMIN', attr='v', idx=sa.TGOV1.idx.v, value=np.zeros(sa.TGOV1.n))" + "sa.TGOV1.alter(src='VMAX', idx=sa.TGOV1.idx.v, value=100*np.ones(sa.TGOV1.n))\n", + "sa.TGOV1.alter(src='VMIN', idx=sa.TGOV1.idx.v, value=np.zeros(sa.TGOV1.n))" ] }, { diff --git a/genconf.py b/genconf.py new file mode 100644 index 00000000..da9d74b1 --- /dev/null +++ b/genconf.py @@ -0,0 +1,63 @@ +""" +Generate configurations from pyproject.toml. 
+""" + +import toml +from datetime import datetime + + +# Get the current date +current_date = datetime.now().strftime("%Y-%m-%d") +comment = f"# Generated on {current_date}.\n" + + +def write_req(): + """ + Write requirements from pyproject.toml to requirements.txt and requirements-extra.txt. + """ + with open('pyproject.toml', 'r') as f: + pyproject = toml.load(f) + + dependencies = pyproject['project']['dependencies'] + dev_dependencies = pyproject['project']['optional-dependencies']['dev'] + doc_dependencies = pyproject['project']['optional-dependencies']['doc'] + + # Overwrite requirements.txt + with open('requirements.txt', 'w') as f: + f.write(comment) + for dep in dependencies: + f.write(dep + '\n') + + # Overwrite requirements-extra.txt + with open('requirements-extra.txt', 'w') as f: + f.write(comment) + max_len = max(len(dep) for dep in dev_dependencies + doc_dependencies) + for dep in dev_dependencies: + f.write(f"{dep.ljust(max_len)} # dev\n") + for dep in doc_dependencies: + f.write(f"{dep.ljust(max_len)} # doc\n") + + print("Requirements files generated successfully.") + + +def write_cfg(): + """ + Write versioneer configuration from pyproject.toml to setup.cfg. + """ + with open('pyproject.toml', 'r') as f: + pyproject = toml.load(f) + + versioneer = pyproject['tool']['versioneer'] + + with open('setup.cfg', 'w') as f: + f.write(comment) + f.write("[versioneer]\n") + for key, value in versioneer.items(): + f.write(f"{key} = {value}\n") + + print("Versioneer configuration generated successfully.") + + +if __name__ == "__main__": + write_req() + write_cfg() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..9758d91f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,73 @@ +[build-system] +requires = ["setuptools", "wheel", "versioneer[toml]"] +build-backend = "setuptools.build_meta" + +[project] +name = "ltbams" +dynamic = ["version"] +description = "Python software for scheduling modeling and co-simulation with dynamics." 
+readme = "README.md" +authors = [ + {name = "Jinning Wang", email = "jinninggm@gmail.com"} +] + +license = {file = "LICENSE"} + +classifiers = [ + "Development Status :: 4 - Beta", + "Natural Language :: English", + "Programming Language :: Python :: 3", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Environment :: Console" +] + +dependencies = [ + "kvxopt>=1.3.2.1", + "numpy", + "scipy", + "sympy>=1.6,!=1.10.0", + "pandas", + "matplotlib", + "psutil", + "openpyxl", + "andes>=1.8.7", + "pybind11", + "cvxpy" +] + +[project.optional-dependencies] +dev = [ + "pytest", + "pytest-cov", + "coverage", + "flake8", + "numpydoc", + "toml", + "pyscipopt", + "toml" +] +doc = [ + "pandoc", + "ipython", + "sphinx", + "pydata-sphinx-theme", + "numpydoc", + "sphinx-copybutton", + "sphinx-panels", + "myst-parser", + "nbsphinx", + "pyscipopt" +] + +[project.scripts] +ams = "ams.cli:main" + +[tool.versioneer] +VCS = "git" +style = "pep440-post" +versionfile_source = "ams/_version.py" +versionfile_build = "ams/_version.py" +tag_prefix = "v" + +[tool.setuptools.packages.find] +where = ["."] \ No newline at end of file diff --git a/requirements-extra.txt b/requirements-extra.txt index c08df06e..2b4798bd 100644 --- a/requirements-extra.txt +++ b/requirements-extra.txt @@ -1,23 +1,19 @@ -# FORMAT -# Put your extra requirements here in the following format -# -# package[version_required] # tag1, tag2, ... -# -# Allow at least one space between the package name/version and '#' -# -# Only one `#` is allowed per line. Lines starting with `#` are ignored. - -pytest==7.4 # dev -pytest-cov # dev -coverage # dev -flake8 # dev -pyscipopt # dev -ipython # dev, doc -sphinx # dev, doc -pydata-sphinx-theme # dev, doc -numpydoc # dev, doc -sphinx-copybutton # dev, doc -sphinx-panels # dev, doc -pydata-sphinx-theme # dev, doc -myst-parser # dev, doc -nbsphinx # dev, doc \ No newline at end of file +# Generated on 2024-11-11. 
+pytest # dev +pytest-cov # dev +coverage # dev +flake8 # dev +numpydoc # dev +toml # dev +pyscipopt # dev +toml # dev +pandoc # doc +ipython # doc +sphinx # doc +pydata-sphinx-theme # doc +numpydoc # doc +sphinx-copybutton # doc +sphinx-panels # doc +myst-parser # doc +nbsphinx # doc +pyscipopt # doc diff --git a/requirements.txt b/requirements.txt index 6466e12f..5ceafe9f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +# Generated on 2024-11-11. kvxopt>=1.3.2.1 numpy scipy @@ -8,4 +9,4 @@ psutil openpyxl andes>=1.8.7 pybind11 -cvxpy \ No newline at end of file +cvxpy diff --git a/setup.cfg b/setup.cfg index ca7d7376..be71e22f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,4 @@ +# Generated on 2024-11-11. [versioneer] VCS = git style = pep440-post diff --git a/setup.py b/setup.py index 71bf24c3..60ac0388 100644 --- a/setup.py +++ b/setup.py @@ -1,108 +1,15 @@ -import re -import sys -import os -from collections import defaultdict - -from setuptools import find_packages, setup +from setuptools import setup import versioneer -if sys.version_info < (3, 6): - error = """ -ams does not support Python <= {0}.{1}. -Python 3.6 and above is required. Check your Python version like so: - -python3 --version - -This may be due to an out-of-date pip. Make sure you have pip >= 9.0.1. -Upgrade pip like so: - -pip install --upgrade pip -""".format(3, 6) - sys.exit(error) - -here = os.path.abspath(os.path.dirname(__file__)) - -with open(os.path.join(here, 'README.md'), encoding='utf-8') as readme_file: - readme = readme_file.read() - - -def parse_requires(filename): - with open(os.path.join(here, filename)) as requirements_file: - reqs = [ - line for line in requirements_file.read().splitlines() - if not line.startswith('#') - ] - return reqs - - -def get_extra_requires(filename, add_all=True): - """ - Build ``extras_require`` from an invert requirements file. 
- - See: - https://hanxiao.io/2019/11/07/A-Better-Practice-for-Managing-extras-require-Dependencies-in-Python/ - """ - - with open(os.path.join(here, filename)) as fp: - extra_deps = defaultdict(set) - for k in fp: - if k.strip() and not k.startswith('#'): - tags = set() - if '#' in k: - if k.count("#") > 1: - raise ValueError("Invalid line: {}".format(k)) - - k, v = k.split('#') - tags.update(vv.strip() for vv in v.split(',')) - - tags.add(re.split('[<=>]', k)[0]) - for t in tags: - extra_deps[t].add(k) - - # add tag `all` at the end - if add_all: - extra_deps['all'] = set(vv for v in extra_deps.values() for vv in v) - - return extra_deps - - -extras_require = get_extra_requires("requirements-extra.txt") - -# --- update `extras_conda` to include packages only available in PyPI --- - setup( - name='ltbams', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), - description="Python software for scheduling modeling and co-simulation with dynanics.", - long_description=readme, - long_description_content_type='text/markdown', - author="Jinning Wang", - author_email='jinninggm@gmail.com', - url='https://github.com/CURENT/ams', - packages=find_packages(exclude=[]), - entry_points={ - 'console_scripts': [ - 'ams = ams.cli:main', - ], - }, - include_package_data=True, package_data={ - 'ltbams': [ + 'ams': [ # When adding files here, remember to update MANIFEST.in as well, # or else they will not be included in the distribution on PyPI! 
# 'path/to/data_file', ] }, - install_requires=parse_requires('requirements.txt'), - extras_require=extras_require, - license="GNU Public License v3", - classifiers=[ - 'Development Status :: 4 - Beta', - 'Natural Language :: English', - 'Programming Language :: Python :: 3', - 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', - 'Environment :: Console', - ], ) diff --git a/tests/test_addressing.py b/tests/test_addressing.py index 1c7d3f1d..674f8458 100644 --- a/tests/test_addressing.py +++ b/tests/test_addressing.py @@ -1,7 +1,7 @@ import unittest +import numpy as np import ams -import numpy as np ams.config_logger(stream_level=40) diff --git a/tests/test_andes_mats.py b/tests/test_andes_mats.py new file mode 100644 index 00000000..eb27b560 --- /dev/null +++ b/tests/test_andes_mats.py @@ -0,0 +1,61 @@ +""" +Test ANDES matrices. +""" + +import unittest +import numpy as np +import importlib.metadata +from packaging.version import parse as parse_version + +import ams + + +class TestMatrices(unittest.TestCase): + """ + Tests for system matrices consistency. + """ + + andes_version = importlib.metadata.version("andes") + if parse_version(andes_version) < parse_version('1.9.2'): + raise unittest.SkipTest("Requires ANDES version >= 1.9.2") + + sp = ams.load(ams.get_case('5bus/pjm5bus_demo.xlsx'), + setup=True, no_output=True, default_config=True,) + sa = sp.to_andes(setup=True, no_output=True, default_config=True,) + + def setUp(self) -> None: + """ + Test setup. + """ + + def test_build_y(self): + """ + Test build_y consistency. + """ + ysp = self.sp.Line.build_y() + ysa = self.sa.Line.build_y() + np.testing.assert_equal(np.array(ysp.V), np.array(ysa.V)) + + def test_build_Bp(self): + """ + Test build_Bp consistency. + """ + Bp_sp = self.sp.Line.build_Bp() + Bp_sa = self.sa.Line.build_Bp() + np.testing.assert_equal(np.array(Bp_sp.V), np.array(Bp_sa.V)) + + def test_build_Bpp(self): + """ + Test build_Bpp consistency. 
+ """ + Bpp_sp = self.sp.Line.build_Bpp() + Bpp_sa = self.sa.Line.build_Bpp() + np.testing.assert_equal(np.array(Bpp_sp.V), np.array(Bpp_sa.V)) + + def test_build_Bdc(self): + """ + Test build_Bdc consistency. + """ + Bdc_sp = self.sp.Line.build_Bdc() + Bdc_sa = self.sa.Line.build_Bdc() + np.testing.assert_equal(np.array(Bdc_sp.V), np.array(Bdc_sa.V)) diff --git a/tests/test_benchmarks.py b/tests/test_benchmarks.py new file mode 100644 index 00000000..66c57af8 --- /dev/null +++ b/tests/test_benchmarks.py @@ -0,0 +1,149 @@ +import unittest + +import ams +import ams.benchmarks as bp + + +def are_packages_available(*packages): + """ + Check if the specified packages are available. + + Parameters + ---------- + *packages : str + Names of the packages to check. + + Returns + ------- + bool + True if all packages are available, False otherwise. + """ + for package in packages: + try: + __import__(package) + except ImportError: + return False + return True + + +def require_packages(*packages): + """ + Decorator to skip a test if the specified packages are not available. + + Parameters + ---------- + *packages : str + Names of the packages required for the test. + + Returns + ------- + function + The decorated test function. + """ + def decorator(test_func): + return unittest.skipIf( + not are_packages_available(*packages), + f"Skipping test because one or more required packages are not available: {', '.join(packages)}" + )(test_func) + return decorator + + +class TestBenchmarks(unittest.TestCase): + """ + Test module for benchmarks.py. 
+ """ + + def test_get_tool_versions(self): + self.assertIsInstance(bp.get_tool_versions(), dict) + + def test_pre_solve_rted(self): + ss = ams.load(ams.get_case('5bus/pjm5bus_demo.xlsx'), + setup=True, default_config=True, no_output=True) + pre_time = bp.pre_solve(ss, 'RTED') + self.assertIsInstance(pre_time, dict) + self.assertEqual(len(pre_time), len(bp.cols_pre)) + for v in pre_time.values(): + self.assertIsInstance(v, float) + + def test_dcopf_solve(self): + ss = ams.load(ams.get_case('matpower/case5.m'), + setup=True, default_config=True, no_output=True) + s, obj = bp.time_routine_solve(ss, routine='DCOPF', solver='CLARABEL', ignore_dpp=False) + self.assertGreater(s, 0) + self.assertGreater(obj, 0) + + def test_pre_solve_ed(self): + ss = ams.load(ams.get_case('5bus/pjm5bus_demo.xlsx'), + setup=True, default_config=True, no_output=True) + pre_time = bp.pre_solve(ss, 'ED') + self.assertIsInstance(pre_time, dict) + self.assertEqual(len(pre_time), len(bp.cols_pre)) + for v in pre_time.values(): + self.assertIsInstance(v, float) + + def test_ed_solve(self): + ss = ams.load(ams.get_case('5bus/pjm5bus_demo.xlsx'), + setup=True, default_config=True, no_output=True) + s, obj = bp.time_routine_solve(ss, 'ED', solver='CLARABEL', ignore_dpp=True) + self.assertGreater(s, 0) + self.assertGreater(obj, 0) + + @require_packages('pandapower') + def time_pdp_dcopf(self): + ss = ams.load(ams.get_case('matpower/case5.m'), + setup=True, default_config=True, no_output=True) + ppc = ams.io.pypower.system2ppc(ss) + ppn = ams.io.pandapower.converter.from_ppc(ppc, f_hz=ss.config.freq) + s, obj = bp.time_pdp_dcopf(ppn) + self.assertGreater(s, 0) + self.assertGreater(obj, 0) + + def test_time_dcopf(self): + ss = ams.load(ams.get_case('matpower/case5.m'), + setup=True, default_config=True, no_output=True) + pre_time, sol = bp.time_routine(ss, + routine='DCOPF', + solvers=['CLARABEL', 'SCIP', 'pandapower'], ignore_dpp=False) + for v in pre_time.values(): + self.assertGreaterEqual(v, 0) + + 
self.assertGreater(sol['CLARABEL']['time'], 0) + self.assertGreater(sol['SCIP']['time'], 0) + self.assertAlmostEqual(sol['CLARABEL']['obj'], + sol['SCIP']['obj'], + places=2) + if not are_packages_available('pandapower'): + self.assertEqual(sol['pandapower']['time'], -1) + self.assertEqual(sol['pandapower']['obj'], -1) + else: + self.assertGreater(sol['pandapower']['obj'], 0) + self.assertAlmostEqual(sol['CLARABEL']['obj'], + sol['pandapower']['obj'], + places=2) + + def test_time_rted(self): + ss = ams.load(ams.get_case('5bus/pjm5bus_demo.xlsx'), + setup=True, default_config=True, no_output=True) + pre_time, sol = bp.time_routine(ss, + routine='RTED', + solvers=['CLARABEL', 'SCIP', 'pandapower'], ignore_dpp=False) + for v in pre_time.values(): + self.assertGreaterEqual(v, 0) + + self.assertGreater(sol['CLARABEL']['time'], 0) + self.assertGreater(sol['SCIP']['time'], 0) + self.assertAlmostEqual(sol['CLARABEL']['obj'], + sol['SCIP']['obj'], + places=2) + self.assertEqual(sol['pandapower']['time'], -1) + self.assertEqual(sol['pandapower']['obj'], -1) + + def test_time_dcopf_with_lf(self): + ss = ams.load(ams.get_case('matpower/case5.m'), + setup=True, default_config=True, no_output=True) + pre_time, sol = bp.time_dcopf_with_lf(ss, solvers=['CLARABEL', 'SCIP'], load_factors=[ + 1, 0.5, 0.25], ignore_dpp=False) + self.assertEqual(len(pre_time), len(bp.cols_pre)) + self.assertAlmostEqual(sol['CLARABEL']['obj'], + sol['SCIP']['obj'], + places=2) diff --git a/tests/test_case.py b/tests/test_case.py index 8c2869ef..6f65d5f2 100644 --- a/tests/test_case.py +++ b/tests/test_case.py @@ -148,7 +148,7 @@ def test_ieee14_raw2xlsx(self): no_output=True, default_config=True, ) - ams.io.xlsx.write(ss, "ieee14.xlsx") + ams.io.xlsx.write(ss, "ieee14.xlsx", overwrite=True) self.assertTrue(os.path.exists("ieee14.xlsx")) os.remove("ieee14.xlsx") @@ -159,7 +159,7 @@ def test_ieee14_raw2json(self): no_output=True, default_config=True, ) - ams.io.json.write(ss, "ieee14.json") + 
ams.io.json.write(ss, "ieee14.json", overwrite=True) self.assertTrue(os.path.exists("ieee14.json")) os.remove("ieee14.json") diff --git a/tests/test_dctypes.py b/tests/test_dctypes.py deleted file mode 100644 index 478e1718..00000000 --- a/tests/test_dctypes.py +++ /dev/null @@ -1,111 +0,0 @@ -import unittest -import numpy as np - -import ams -import cvxpy as cp - - -def require_MIP_solver(f): - """ - Decorator for skipping tests that require MIP solver. - """ - def wrapper(*args, **kwargs): - all_solvers = cp.installed_solvers() - mip_solvers = ['CPLEX', 'GUROBI', 'MOSEK'] - if any(s in mip_solvers for s in all_solvers): - pass - else: - raise unittest.SkipTest("MIP solver is not available.") - return f(*args, **kwargs) - return wrapper - - -class TestDCED(unittest.TestCase): - """ - Test DCED types routine. - """ - - def setUp(self) -> None: - self.ss = ams.load(ams.get_case("ieee39/ieee39_uced_esd1.xlsx"), - default_config=True, - no_output=True, - ) - - def test_dcopf(self): - """ - Test DCOPF. - """ - init = self.ss.DCOPF.init() - self.assertTrue(init, "DCOPF initialization failed!") - self.ss.DCOPF.run(solver='CLARABEL') - np.testing.assert_equal(self.ss.DCOPF.exit_code, 0) - - def test_rted(self) -> None: - """ - Test RTED. - """ - init = self.ss.RTED.init() - self.assertTrue(init, "RTED initialization failed!") - self.ss.RTED.run(solver='CLARABEL') - np.testing.assert_equal(self.ss.RTED.exit_code, 0) - - def test_ed(self) -> None: - """ - Test ED. - """ - init = self.ss.ED.init() - self.assertTrue(init, "ED initialization failed!") - self.ss.ED.run(solver='CLARABEL') - np.testing.assert_equal(self.ss.ED.exit_code, 0) - - @require_MIP_solver - def test_rtedes(self) -> None: - """ - Test RTEDES. - """ - init = self.ss.RTEDES.init() - self.assertTrue(init, "RTEDES initialization failed!") - self.ss.RTEDES.run() - np.testing.assert_equal(self.ss.RTEDES.exit_code, 0) - - @require_MIP_solver - def test_edes(self) -> None: - """ - Test EDES. 
- """ - init = self.ss.EDES.init() - self.assertTrue(init, "EDES initialization failed!") - self.ss.EDES.run() - np.testing.assert_equal(self.ss.EDES.exit_code, 0) - - -class test_DCUC(unittest.TestCase): - """ - Test DCUC types routine. - """ - - def setUp(self) -> None: - self.ss = ams.load(ams.get_case("ieee39/ieee39_uced_esd1.xlsx"), - default_config=True, - no_output=True, - ) - - @require_MIP_solver - def test_uc(self) -> None: - """ - Test UC. - """ - init = self.ss.UC.init() - self.assertTrue(init, "UC initialization failed!") - self.ss.UC.run() - np.testing.assert_equal(self.ss.UC.exit_code, 0) - - @require_MIP_solver - def test_uces(self) -> None: - """ - Test UCES. - """ - init = self.ss.UCES.init() - self.assertTrue(init, "UCES initialization failed!") - self.ss.UCES.run() - np.testing.assert_equal(self.ss.UCES.exit_code, 0) diff --git a/tests/test_andes.py b/tests/test_interop.py similarity index 76% rename from tests/test_andes.py rename to tests/test_interop.py index d72602f0..11070f21 100644 --- a/tests/test_andes.py +++ b/tests/test_interop.py @@ -1,73 +1,20 @@ """ -Test ANDES interface. +Test interface. """ import unittest import numpy as np -import pkg_resources -from pkg_resources import parse_version import andes import ams -from ams.interop.andes import (build_group_table, make_link_table, to_andes, - parse_addfile, verify_pf) +from ams.interface import (build_group_table, make_link_table, to_andes, + parse_addfile, verify_pf) -class TestMatrices(unittest.TestCase): +class TestAndesConversion(unittest.TestCase): """ - Tests for system matrices consistency. 
- """ - - andes_version = pkg_resources.get_distribution("andes").version - if parse_version(andes_version) < parse_version('1.9.2'): - raise unittest.SkipTest("Requires ANDES version >= 1.9.2") - - sp = ams.load(ams.get_case('5bus/pjm5bus_demo.xlsx'), - setup=True, no_output=True, default_config=True,) - sa = sp.to_andes(setup=True, no_output=True, default_config=True,) - - def setUp(self) -> None: - """ - Test setup. - """ - - def test_build_y(self): - """ - Test build_y consistency. - """ - ysp = self.sp.Line.build_y() - ysa = self.sa.Line.build_y() - np.testing.assert_equal(np.array(ysp.V), np.array(ysa.V)) - - def test_build_Bp(self): - """ - Test build_Bp consistency. - """ - Bp_sp = self.sp.Line.build_Bp() - Bp_sa = self.sa.Line.build_Bp() - np.testing.assert_equal(np.array(Bp_sp.V), np.array(Bp_sa.V)) - - def test_build_Bpp(self): - """ - Test build_Bpp consistency. - """ - Bpp_sp = self.sp.Line.build_Bpp() - Bpp_sa = self.sa.Line.build_Bpp() - np.testing.assert_equal(np.array(Bpp_sp.V), np.array(Bpp_sa.V)) - - def test_build_Bdc(self): - """ - Test build_Bdc consistency. - """ - Bdc_sp = self.sp.Line.build_Bdc() - Bdc_sa = self.sa.Line.build_Bdc() - np.testing.assert_equal(np.array(Bdc_sp.V), np.array(Bdc_sa.V)) - - -class TestInteropBase(unittest.TestCase): - """ - Tests for basic function of ANDES interface. + Tests conversion from AMS to ANDES. 
""" ad_cases = [ 'ieee14/ieee14_full.xlsx', @@ -211,8 +158,8 @@ def test_data_exchange(self): no_output=True, default_config=True,) # alleviate limiter - sa.TGOV1.set(src='VMAX', attr='v', idx=sa.TGOV1.idx.v, value=100*np.ones(sa.TGOV1.n)) - sa.TGOV1.set(src='VMIN', attr='v', idx=sa.TGOV1.idx.v, value=np.zeros(sa.TGOV1.n)) + sa.TGOV1.set(src='VMAX', idx=sa.TGOV1.idx.v, attr='v', value=100*np.ones(sa.TGOV1.n)) + sa.TGOV1.set(src='VMIN', idx=sa.TGOV1.idx.v, attr='v', value=np.zeros(sa.TGOV1.n)) # --- test before PFlow --- self.sp.dyn.send(adsys=sa, routine='RTED') diff --git a/tests/test_io.py b/tests/test_io.py new file mode 100644 index 00000000..6383e903 --- /dev/null +++ b/tests/test_io.py @@ -0,0 +1,32 @@ +""" +Test file input/output. +""" + +import unittest +import numpy as np + +import ams + + +class TestMATPOWER(unittest.TestCase): + + def setUp(self): + self.mpc5 = ams.io.matpower.m2mpc(ams.get_case('matpower/case5.m')) + self.mpc14 = ams.io.matpower.m2mpc(ams.get_case('matpower/case14.m')) + + def test_m2mpc(self): + self.assertTupleEqual(self.mpc5['gencost'].shape, (5, 6)) + self.assertTupleEqual(self.mpc14['gencost'].shape, (5, 7)) + + def test_mpc2system(self): + system5 = ams.system.System() + ams.io.matpower.mpc2system(self.mpc5, system5) + # In case5.m, the gencost has type 2 cost model, with 2 parameters. + np.testing.assert_array_equal(system5.GCost.c2.v, + np.zeros(system5.StaticGen.n)) + + system14 = ams.system.System() + ams.io.matpower.mpc2system(self.mpc14, system14) + # In case14.m, the gencost has type 2 cost model, with 3 parameters. + np.testing.assert_array_less(np.zeros(system14.StaticGen.n), + system14.GCost.c2.v,) diff --git a/tests/test_jumper.py b/tests/test_jumper.py new file mode 100644 index 00000000..9395ae97 --- /dev/null +++ b/tests/test_jumper.py @@ -0,0 +1,27 @@ +""" +Test `Jumper` class. +""" + +import unittest + +import ams + + +class TessJumper(unittest.TestCase): + """ + Test `Jumper` class. 
+ """ + + def setUp(self) -> None: + """ + Test setup. + """ + self.sp = ams.load(ams.get_case("5bus/pjm5bus_jumper.xlsx"), + setup=True, no_output=True, default_config=True) + + def test_to_andes(self): + """ + Test `to_andes` method when `Jumper` exists. + """ + sa = self.sp.to_andes() + self.assertEqual(sa.Jumper.n, 1) diff --git a/tests/test_mats.py b/tests/test_matp.py similarity index 99% rename from tests/test_mats.py rename to tests/test_matp.py index c2008185..93ae31a3 100644 --- a/tests/test_mats.py +++ b/tests/test_matp.py @@ -1,3 +1,7 @@ +""" +Test module MatProcessor. +""" + import unittest import os diff --git a/tests/test_routine.py b/tests/test_routine.py index 2297d352..64b8cd70 100644 --- a/tests/test_routine.py +++ b/tests/test_routine.py @@ -1,9 +1,9 @@ import unittest import numpy as np -from andes.shared import pd import ams +from ams.shared import pd class TestRoutineMethods(unittest.TestCase): @@ -92,56 +92,10 @@ def test_value_method(self): # --- constraint values --- for constr in self.ss.DCOPF.constrs.values(): - np.testing.assert_almost_equal(constr.v, constr.v2, decimal=6) + np.testing.assert_almost_equal(constr.v, constr.e, decimal=6) # --- objective value --- - self.assertAlmostEqual(self.ss.DCOPF.obj.v, self.ss.DCOPF.obj.v2, places=6) - - -class TestOModel(unittest.TestCase): - """ - Test methods of `RTED`. - """ - - def setUp(self) -> None: - self.ss = ams.load(ams.get_case("5bus/pjm5bus_demo.xlsx"), - setup=True, default_config=True, no_output=True) - # decrease load first - self.ss.PQ.set(src='p0', attr='v', idx=['PQ_1', 'PQ_2'], value=[0.3, 0.3]) - - def test_trip(self): - """ - Test generator trip. 
- """ - # --- run DCOPF --- - self.ss.DCOPF.run(solver='CLARABEL') - obj = self.ss.DCOPF.obj.v - - # --- generator trip --- - self.ss.StaticGen.set(src='u', attr='v', idx='PV_1', value=0) - - self.ss.DCOPF.update() - - self.ss.DCOPF.run(solver='CLARABEL') - self.assertTrue(self.ss.DCOPF.converged, "DCOPF did not converge under generator trip!") - obj_gt = self.ss.DCOPF.obj.v - self.assertGreater(obj_gt, obj) - - pg_trip = self.ss.DCOPF.get(src='pg', attr='v', idx='PV_1') - np.testing.assert_almost_equal(pg_trip, 0, decimal=6) - - # --- trip line --- - self.ss.Line.set(src='u', attr='v', idx='Line_4', value=0) - - self.ss.DCOPF.update() - - self.ss.DCOPF.run(solver='CLARABEL') - self.assertTrue(self.ss.DCOPF.converged, "DCOPF did not converge under line trip!") - obj_lt = self.ss.DCOPF.obj.v - self.assertGreater(obj_lt, obj_gt) - - plf_trip = self.ss.DCOPF.get(src='plf', attr='v', idx='Line_4') - np.testing.assert_almost_equal(plf_trip, 0, decimal=6) + self.assertAlmostEqual(self.ss.DCOPF.obj.v, self.ss.DCOPF.obj.e, places=6) class TestSetOptzValueACOPF(unittest.TestCase): @@ -222,39 +176,3 @@ def test_vset_after_init(self): self.assertRaises(AttributeError, lambda: setattr(self.ss.DCOPF.plfub, 'v', 1)) # set values to `Objective` is not allowed self.assertRaises(AttributeError, lambda: setattr(self.ss.DCOPF.obj, 'v', 1)) - - -class TestRTED(unittest.TestCase): - """ - Test methods of `RTED`. - """ - - def setUp(self) -> None: - self.ss = ams.load(ams.get_case("5bus/pjm5bus_demo.xlsx"), - setup=True, - default_config=True, - no_output=True, - ) - self.ss.RTED.run(solver='CLARABEL') - - def test_dc2ac(self): - """ - Test `RTED.init()` method. 
- """ - self.ss.RTED.dc2ac() - self.assertTrue(self.ss.RTED.converted, "AC conversion failed!") - self.assertTrue(self.ss.RTED.exec_time > 0, "Execution time is not greater than 0.") - - stg_idx = self.ss.StaticGen.get_idx() - pg_rted = self.ss.RTED.get(src='pg', attr='v', idx=stg_idx) - pg_acopf = self.ss.ACOPF.get(src='pg', attr='v', idx=stg_idx) - np.testing.assert_almost_equal(pg_rted, pg_acopf, decimal=3) - - bus_idx = self.ss.Bus.get_idx() - v_rted = self.ss.RTED.get(src='vBus', attr='v', idx=bus_idx) - v_acopf = self.ss.ACOPF.get(src='vBus', attr='v', idx=bus_idx) - np.testing.assert_almost_equal(v_rted, v_acopf, decimal=3) - - a_rted = self.ss.RTED.get(src='aBus', attr='v', idx=bus_idx) - a_acopf = self.ss.ACOPF.get(src='aBus', attr='v', idx=bus_idx) - np.testing.assert_almost_equal(a_rted, a_acopf, decimal=3) diff --git a/tests/test_rtn_dcopf.py b/tests/test_rtn_dcopf.py new file mode 100644 index 00000000..7903f553 --- /dev/null +++ b/tests/test_rtn_dcopf.py @@ -0,0 +1,77 @@ +import unittest + +import ams + + +class TestDCOPF(unittest.TestCase): + """ + Test routine `DCOPF`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case("5bus/pjm5bus_demo.xlsx"), + setup=True, default_config=True, no_output=True) + # decrease load first + self.ss.PQ.set(src='p0', attr='v', idx=['PQ_1', 'PQ_2'], value=[0.3, 0.3]) + + def test_init(self): + """ + Test initialization. + """ + self.ss.DCOPF.init() + self.assertTrue(self.ss.DCOPF.initialized, "DCOPF initialization failed!") + + def test_trip_gen(self): + """ + Test generator tripping. 
+ """ + stg = 'PV_1' + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + + self.ss.DCOPF.update() + self.ss.DCOPF.run(solver='CLARABEL') + self.assertTrue(self.ss.DCOPF.converged, "DCOPF did not converge under generator trip!") + self.assertAlmostEqual(self.ss.DCOPF.get(src='pg', attr='v', idx=stg), + 0, places=6, + msg="Generator trip does not take effect!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) # reset + + def test_trip_line(self): + """ + Test line tripping. + """ + self.ss.Line.set(src='u', attr='v', idx='Line_3', value=0) + + self.ss.DCOPF.update() + self.ss.DCOPF.run(solver='CLARABEL') + self.assertTrue(self.ss.DCOPF.converged, "DCOPF did not converge under line trip!") + self.assertAlmostEqual(self.ss.DCOPF.get(src='plf', attr='v', idx='Line_3'), + 0, places=6, + msg="Line trip does not take effect!") + + self.ss.Line.alter(src='u', idx='Line_3', value=1) # reset + + def test_set_load(self): + """ + Test setting and tripping load. + """ + # --- run DCOPF --- + self.ss.DCOPF.run(solver='CLARABEL') + pgs = self.ss.DCOPF.pg.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.DCOPF.update() + + self.ss.DCOPF.run(solver='CLARABEL') + pgs_pqt = self.ss.DCOPF.pg.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.set(src='u', attr='v', idx='PQ_2', value=0) + self.ss.DCOPF.update() + + self.ss.DCOPF.run(solver='CLARABEL') + pgs_pqt2 = self.ss.DCOPF.pg.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") diff --git a/tests/test_rtn_ed.py b/tests/test_rtn_ed.py new file mode 100644 index 00000000..cadefd3f --- /dev/null +++ b/tests/test_rtn_ed.py @@ -0,0 +1,184 @@ +import unittest +import numpy as np + +import ams + + +class TestED(unittest.TestCase): + """ + Test routine `ED`. 
+ """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case("5bus/pjm5bus_demo.xlsx"), + setup=True, default_config=True, no_output=True) + # decrease load first + self.ss.PQ.set(src='p0', attr='v', idx=['PQ_1', 'PQ_2'], value=[0.3, 0.3]) + + def test_init(self): + """ + Test initialization. + """ + self.ss.ED.init() + self.assertTrue(self.ss.ED.initialized, "ED initialization failed!") + + def test_trip_gen(self): + """ + Test generator tripping. + """ + # a) ensure EDTSlot.ug takes effect + # NOTE: manually chang ug.v for testing purpose + stg = 'PV_1' + stg_uid = self.ss.ED.pg.get_idx().index(stg) + loc_offtime = np.array([0, 2, 4]) + self.ss.EDTSlot.ug.v[loc_offtime, stg_uid] = 0 + + self.ss.ED.run(solver='CLARABEL') + self.assertTrue(self.ss.ED.converged, "ED did not converge under generator trip!") + pg_pv1 = self.ss.ED.get(src='pg', attr='v', idx=stg) + np.testing.assert_almost_equal(np.zeros_like(loc_offtime), + pg_pv1[loc_offtime], + decimal=6, + err_msg="Generator trip does not take effect!") + + self.ss.EDTSlot.ug.v[...] = 1 # reset + + # b) ensure StaticGen.u does not take effect + # NOTE: in ED, `EDTSlot.ug` is used instead of `StaticGen.u` + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + self.ss.ED.update() + + self.ss.ED.run(solver='CLARABEL') + self.assertTrue(self.ss.ED.converged, "ED did not converge under generator trip!") + pg_pv1 = self.ss.ED.get(src='pg', attr='v', idx=stg) + np.testing.assert_array_less(np.zeros_like(pg_pv1), pg_pv1, + err_msg="Generator trip take effect, which is unexpected!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) # reset + + def test_trip_line(self): + """ + Test line tripping. 
+ """ + self.ss.Line.set(src='u', attr='v', idx='Line_3', value=0) + self.ss.ED.update() + + self.ss.ED.run(solver='CLARABEL') + self.assertTrue(self.ss.ED.converged, "ED did not converge under line trip!") + plf_l3 = self.ss.ED.get(src='plf', attr='v', idx='Line_3') + np.testing.assert_almost_equal(np.zeros_like(plf_l3), + plf_l3, decimal=6) + + self.ss.Line.alter(src='u', idx='Line_3', value=1) # reset + + def test_set_load(self): + """ + Test setting and tripping load. + """ + self.ss.ED.run(solver='CLARABEL') + pgs = self.ss.ED.pg.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.ED.update() + + self.ss.ED.run(solver='CLARABEL') + pgs_pqt = self.ss.ED.pg.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.alter(src='u', idx='PQ_2', value=0) + self.ss.ED.update() + + self.ss.ED.run(solver='CLARABEL') + pgs_pqt2 = self.ss.ED.pg.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") + + +class TestEDES(unittest.TestCase): + """ + Test routine `EDES`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case("5bus/pjm5bus_uced_esd1.xlsx"), + setup=True, default_config=True, no_output=True) + # decrease load first + self.ss.PQ.set(src='p0', attr='v', idx=['PQ_1', 'PQ_2'], value=[0.3, 0.3]) + + def test_init(self): + """ + Test initialization. + """ + self.ss.EDES.init() + self.assertTrue(self.ss.EDES.initialized, "EDES initialization failed!") + + def test_trip_gen(self): + """ + Test generator tripping. 
+ """ + # a) ensure EDTSlot.ug takes effect + # NOTE: manually chang ug.v for testing purpose + stg = 'PV_1' + stg_uid = self.ss.ED.pg.get_idx().index(stg) + loc_offtime = np.array([0, 2]) + self.ss.EDTSlot.ug.v[loc_offtime, stg_uid] = 0 + + self.ss.ED.run(solver='CLARABEL') + self.assertTrue(self.ss.ED.converged, "ED did not converge under generator trip!") + pg_pv1 = self.ss.ED.get(src='pg', attr='v', idx=stg) + np.testing.assert_almost_equal(np.zeros_like(loc_offtime), + pg_pv1[loc_offtime], + decimal=6, + err_msg="Generator trip does not take effect!") + + self.ss.EDTSlot.ug.v[...] = 1 # reset + + # b) ensure StaticGen.u does not take effect + # NOTE: in ED, `EDTSlot.ug` is used instead of `StaticGen.u` + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + self.ss.ED.update() + + self.ss.ED.run(solver='CLARABEL') + self.assertTrue(self.ss.ED.converged, "ED did not converge under generator trip!") + pg_pv1 = self.ss.ED.get(src='pg', attr='v', idx=stg) + np.testing.assert_array_less(np.zeros_like(pg_pv1), pg_pv1, + err_msg="Generator trip take effect, which is unexpected!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) # reset + + def test_line_trip(self): + """ + Test line tripping. + """ + self.ss.Line.set(src='u', attr='v', idx=3, value=0) + self.ss.EDES.run(solver='SCIP') + self.assertTrue(self.ss.EDES.converged, "EDES did not converge!") + plf_l3 = self.ss.EDES.get(src='plf', attr='v', idx=3) + np.testing.assert_almost_equal(np.zeros_like(plf_l3), + plf_l3, decimal=6) + + self.ss.Line.alter(src='u', idx=3, value=1) + + def test_set_load(self): + """ + Test setting and tripping load. 
+ """ + self.ss.EDES.run(solver='SCIP') + pgs = self.ss.EDES.pg.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.EDES.update() + + self.ss.EDES.run(solver='SCIP') + pgs_pqt = self.ss.EDES.pg.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.alter(src='u', idx='PQ_2', value=0) + self.ss.EDES.update() + + self.ss.EDES.run(solver='SCIP') + pgs_pqt2 = self.ss.EDES.pg.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") diff --git a/tests/test_rtn_pflow.py b/tests/test_rtn_pflow.py new file mode 100644 index 00000000..c89c34fa --- /dev/null +++ b/tests/test_rtn_pflow.py @@ -0,0 +1,219 @@ +import unittest + +import ams + + +class TestDCPF(unittest.TestCase): + """ + Test routine `DCPF`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case('matpower/case14.m'), + setup=True, no_output=True, default_config=True) + + def test_init(self): + """ + Test initialization. + """ + self.ss.DCPF.init() + self.assertTrue(self.ss.DCPF.initialized, "DCPF initialization failed!") + + def test_trip_gen(self): + """ + Test generator tripping. + """ + stg = 2 + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + + self.ss.DCPF.update() + self.ss.DCPF.run() + self.assertTrue(self.ss.DCPF.converged, "DCPF did not converge under generator trip!") + self.assertAlmostEqual(self.ss.DCPF.get(src='pg', attr='v', idx=stg), + 0, places=6, + msg="Generator trip does not take effect!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) # reset + + def test_trip_line(self): + """ + Test line tripping. 
+ """ + self.ss.Line.set(src='u', attr='v', idx='Line_3', value=0) + + self.ss.DCPF.update() + self.ss.DCPF.run() + self.assertTrue(self.ss.DCPF.converged, "DCPF did not converge under line trip!") + self.assertAlmostEqual(self.ss.DCPF.get(src='plf', attr='v', idx='Line_3'), + 0, places=6, + msg="Line trip does not take effect!") + + self.ss.Line.alter(src='u', idx='Line_3', value=1) + + def test_set_load(self): + """ + Test setting and tripping load. + """ + # --- run DCPF --- + self.ss.DCPF.run() + pgs = self.ss.DCPF.pg.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.DCPF.update() + + self.ss.DCPF.run() + pgs_pqt = self.ss.DCPF.pg.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0) + self.ss.DCPF.update() + + self.ss.DCPF.run() + pgs_pqt2 = self.ss.DCPF.pg.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") + + +class TestPFlow(unittest.TestCase): + """ + Test routine `DCPF`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case('matpower/case14.m'), + setup=True, no_output=True, default_config=True) + + def test_init(self): + """ + Test initialization. + """ + self.ss.PFlow.init() + self.assertTrue(self.ss.PFlow.initialized, "PFlow initialization failed!") + + def test_trip_gen(self): + """ + Test generator tripping. + """ + stg = 2 + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + + self.ss.PFlow.update() + self.ss.PFlow.run() + self.assertTrue(self.ss.PFlow.converged, "PFlow did not converge under generator trip!") + self.assertAlmostEqual(self.ss.PFlow.get(src='pg', attr='v', idx=stg), + 0, places=6, + msg="Generator trip does not take effect!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) + + def test_trip_line(self): + """ + Test line tripping. 
+ """ + self.ss.Line.set(src='u', attr='v', idx='Line_3', value=0) + + self.ss.PFlow.update() + self.ss.PFlow.run() + self.assertTrue(self.ss.PFlow.converged, "PFlow did not converge under line trip!") + self.assertAlmostEqual(self.ss.PFlow.get(src='plf', attr='v', idx='Line_3'), + 0, places=6, + msg="Line trip does not take effect!") + + self.ss.Line.alter(src='u', idx='Line_3', value=1) + + def test_set_load(self): + """ + Test setting and tripping load. + """ + # --- run PFlow --- + self.ss.PFlow.run() + pgs = self.ss.PFlow.pg.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.PFlow.update() + + self.ss.PFlow.run() + pgs_pqt = self.ss.PFlow.pg.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0) + self.ss.PFlow.update() + + self.ss.PFlow.run() + pgs_pqt2 = self.ss.PFlow.pg.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") + + +class TestACOPF(unittest.TestCase): + """ + Test routine `ACOPF`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case('matpower/case14.m'), + setup=True, no_output=True, default_config=True) + + def test_init(self): + """ + Test initialization. + """ + self.ss.ACOPF.init() + self.assertTrue(self.ss.ACOPF.initialized, "ACOPF initialization failed!") + + def test_trip_gen(self): + """ + Test generator tripping. + """ + stg = 2 + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + + self.ss.ACOPF.update() + self.ss.ACOPF.run() + self.assertTrue(self.ss.ACOPF.converged, "ACOPF did not converge under generator trip!") + self.assertAlmostEqual(self.ss.ACOPF.get(src='pg', attr='v', idx=stg), + 0, places=6, + msg="Generator trip does not take effect!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) + + def test_trip_line(self): + """ + Test line tripping. 
+ """ + self.ss.Line.set(src='u', attr='v', idx='Line_3', value=0) + + self.ss.ACOPF.update() + self.ss.ACOPF.run() + self.assertTrue(self.ss.ACOPF.converged, "ACOPF did not converge under line trip!") + self.assertAlmostEqual(self.ss.ACOPF.get(src='plf', attr='v', idx='Line_3'), + 0, places=6, + msg="Line trip does not take effect!") + + self.ss.Line.alter(src='u', idx='Line_3', value=1) + + def test_set_load(self): + """ + Test setting and tripping load. + """ + # --- run ACOPF --- + self.ss.ACOPF.run() + pgs = self.ss.ACOPF.pg.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.ACOPF.update() + + self.ss.ACOPF.run() + pgs_pqt = self.ss.ACOPF.pg.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0) + self.ss.ACOPF.update() + + self.ss.ACOPF.run() + pgs_pqt2 = self.ss.ACOPF.pg.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") diff --git a/tests/test_rtn_rted.py b/tests/test_rtn_rted.py new file mode 100644 index 00000000..f112ada7 --- /dev/null +++ b/tests/test_rtn_rted.py @@ -0,0 +1,176 @@ +import unittest +import numpy as np + +import ams +from ams.shared import skip_unittest_without_MIP + + +class TestRTED(unittest.TestCase): + """ + Test methods of `RTED`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case("5bus/pjm5bus_demo.xlsx"), + setup=True, + default_config=True, + no_output=True, + ) + self.ss.RTED.run(solver='CLARABEL') + + def test_init(self): + """ + Test `RTED.init()` method. + """ + self.ss.RTED.init() + self.assertTrue(self.ss.RTED.initialized, "RTED initialization failed!") + + def test_trip_gen(self): + """ + Test generator tripping. 
+ """ + stg = 'PV_1' + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + + self.ss.RTED.update() + self.ss.RTED.run(solver='CLARABEL') + self.assertTrue(self.ss.RTED.converged, "RTED did not converge under generator trip!") + self.assertAlmostEqual(self.ss.RTED.get(src='pg', attr='v', idx=stg), + 0, places=6, + msg="Generator trip does not take effect!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) # reset + + def test_trip_line(self): + """ + Test line tripping. + """ + self.ss.Line.set(src='u', attr='v', idx='Line_3', value=0) + + self.ss.RTED.update() + self.ss.RTED.run(solver='CLARABEL') + self.assertTrue(self.ss.RTED.converged, "RTED did not converge under line trip!") + self.assertAlmostEqual(self.ss.RTED.get(src='plf', attr='v', idx='Line_3'), + 0, places=6, + msg="Line trip does not take effect!") + + self.ss.Line.alter(src='u', idx='Line_3', value=1) + + def test_set_load(self): + """ + Test setting and tripping load. + """ + self.ss.RTED.run(solver='CLARABEL') + obj = self.ss.RTED.obj.v + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.RTED.update() + + self.ss.RTED.run(solver='CLARABEL') + obj_pqt = self.ss.RTED.obj.v + self.assertLess(obj_pqt, obj, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.set(src='u', attr='v', idx='PQ_2', value=0) + self.ss.RTED.update() + + self.ss.RTED.run(solver='CLARABEL') + obj_pqt2 = self.ss.RTED.obj.v + self.assertLess(obj_pqt2, obj_pqt, "Load trip does not take effect!") + + def test_dc2ac(self): + """ + Test `RTED.init()` method. 
+ """ + self.ss.RTED.dc2ac() + self.assertTrue(self.ss.RTED.converted, "AC conversion failed!") + self.assertTrue(self.ss.RTED.exec_time > 0, "Execution time is not greater than 0.") + + stg_idx = self.ss.StaticGen.get_idx() + pg_rted = self.ss.RTED.get(src='pg', attr='v', idx=stg_idx) + pg_acopf = self.ss.ACOPF.get(src='pg', attr='v', idx=stg_idx) + np.testing.assert_almost_equal(pg_rted, pg_acopf, decimal=3) + + bus_idx = self.ss.Bus.get_idx() + v_rted = self.ss.RTED.get(src='vBus', attr='v', idx=bus_idx) + v_acopf = self.ss.ACOPF.get(src='vBus', attr='v', idx=bus_idx) + np.testing.assert_almost_equal(v_rted, v_acopf, decimal=3) + + a_rted = self.ss.RTED.get(src='aBus', attr='v', idx=bus_idx) + a_acopf = self.ss.ACOPF.get(src='aBus', attr='v', idx=bus_idx) + np.testing.assert_almost_equal(a_rted, a_acopf, decimal=3) + + +class TestRTEDES(unittest.TestCase): + """ + Test routine `RTEDES`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case("5bus/pjm5bus_uced_esd1.xlsx"), + setup=True, default_config=True, no_output=True) + + def test_init(self): + """ + Test initialization. + """ + self.ss.RTEDES.init() + self.assertTrue(self.ss.RTEDES.initialized, "RTEDES initialization failed!") + + @skip_unittest_without_MIP + def test_trip_gen(self): + """ + Test generator tripping. + """ + stg = 'PV_1' + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=0) + + self.ss.RTEDES.update() + self.ss.RTEDES.run(solver='SCIP') + self.assertTrue(self.ss.RTEDES.converged, "RTEDES did not converge under generator trip!") + self.assertAlmostEqual(self.ss.RTEDES.get(src='pg', attr='v', idx=stg), + 0, places=6, + msg="Generator trip does not take effect!") + + self.ss.StaticGen.set(src='u', idx=stg, attr='v', value=1) + + @skip_unittest_without_MIP + def test_trip_line(self): + """ + Test line tripping. 
+ """ + self.ss.Line.set(src='u', attr='v', idx=3, value=0) + + self.ss.RTEDES.update() + self.ss.RTEDES.run(solver='SCIP') + self.assertTrue(self.ss.RTEDES.converged, "RTEDES did not converge under line trip!") + self.assertAlmostEqual(self.ss.RTEDES.get(src='plf', attr='v', idx=3), + 0, places=6, + msg="Line trip does not take effect!") + + self.ss.Line.alter(src='u', idx=3, value=1) + + @skip_unittest_without_MIP + def test_set_load(self): + """ + Test setting and tripping load. + """ + self.ss.RTEDES.run(solver='SCIP') + pgs = self.ss.RTEDES.obj.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=1) + self.ss.RTEDES.update() + + self.ss.RTEDES.run(solver='SCIP') + pgs_pqt = self.ss.RTEDES.obj.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.set(src='u', attr='v', idx='PQ_2', value=0) + self.ss.RTEDES.update() + + self.ss.RTEDES.run(solver='SCIP') + pgs_pqt2 = self.ss.RTEDES.obj.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") diff --git a/tests/test_rtn_uc.py b/tests/test_rtn_uc.py new file mode 100644 index 00000000..df15a6b2 --- /dev/null +++ b/tests/test_rtn_uc.py @@ -0,0 +1,165 @@ +import unittest +import numpy as np + +import ams +from ams.shared import skip_unittest_without_MIP + + +class TestUC(unittest.TestCase): + """ + Test routine `UC`. + """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case("5bus/pjm5bus_demo.xlsx"), + setup=True, default_config=True, no_output=True) + # decrease load first + self.ss.PQ.set(src='p0', attr='v', idx=['PQ_1', 'PQ_2'], value=[0.3, 0.3]) + # run UC._initial_guess() + self.off_gen = self.ss.UC._initial_guess() + + def test_initial_guess(self): + """ + Test initial guess. 
+ """ + u_off_gen = self.ss.StaticGen.get(src='u', idx=self.off_gen) + np.testing.assert_equal(u_off_gen, np.zeros_like(u_off_gen), + err_msg="UC._initial_guess() failed!") + + def test_init(self): + """ + Test initialization. + """ + self.ss.UC.init() + self.assertTrue(self.ss.UC.initialized, "UC initialization failed!") + + @skip_unittest_without_MIP + def test_trip_gen(self): + """ + Test generator tripping. + """ + self.ss.UC.run(solver='SCIP') + self.assertTrue(self.ss.UC.converged, "UC did not converge!") + pg_off_gen = self.ss.UC.get(src='pg', attr='v', idx=self.off_gen) + np.testing.assert_almost_equal(np.zeros_like(pg_off_gen), + pg_off_gen, decimal=6, + err_msg="Off generators are not turned off!") + + @skip_unittest_without_MIP + def test_set_load(self): + """ + Test setting and tripping load. + """ + self.ss.UC.run(solver='SCIP') + pgs = self.ss.UC.obj.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.UC.update() + + self.ss.UC.run(solver='SCIP') + pgs_pqt = self.ss.UC.obj.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.alter(src='u', idx='PQ_2', value=0) + self.ss.UC.update() + + self.ss.UC.run(solver='SCIP') + pgs_pqt2 = self.ss.UC.obj.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") + + @skip_unittest_without_MIP + def test_trip_line(self): + """ + Test line tripping. + """ + self.ss.Line.set(src='u', attr='v', idx='Line_3', value=0) + self.ss.UC.update() + + self.ss.UC.run(solver='SCIP') + self.assertTrue(self.ss.UC.converged, "UC did not converge under line trip!") + plf_l3 = self.ss.UC.get(src='plf', attr='v', idx='Line_3') + np.testing.assert_almost_equal(np.zeros_like(plf_l3), + plf_l3, decimal=6) + + self.ss.Line.alter(src='u', idx='Line_3', value=1) + + +class TestUCES(unittest.TestCase): + """ + Test routine `UCES`. 
+ """ + + def setUp(self) -> None: + self.ss = ams.load(ams.get_case("5bus/pjm5bus_uced_esd1.xlsx"), + setup=True, default_config=True, no_output=True) + # run `_initial_guess()` + self.off_gen = self.ss.UCES._initial_guess() + + def test_initial_guess(self): + """ + Test initial guess. + """ + u_off_gen = self.ss.StaticGen.get(src='u', idx=self.off_gen) + np.testing.assert_equal(u_off_gen, np.zeros_like(u_off_gen), + err_msg="UCES._initial_guess() failed!") + + def test_init(self): + """ + Test initialization. + """ + self.ss.UCES.init() + self.assertTrue(self.ss.UCES.initialized, "UCES initialization failed!") + + @skip_unittest_without_MIP + def test_trip_gen(self): + """ + Test generator tripping. + """ + self.ss.UCES.run(solver='SCIP') + self.assertTrue(self.ss.UCES.converged, "UCES did not converge!") + pg_off_gen = self.ss.UCES.get(src='pg', attr='v', idx=self.off_gen) + np.testing.assert_almost_equal(np.zeros_like(pg_off_gen), + pg_off_gen, decimal=6, + err_msg="Off generators are not turned off!") + + @skip_unittest_without_MIP + def test_set_load(self): + """ + Test setting and tripping load. + """ + self.ss.UCES.run(solver='SCIP') + pgs = self.ss.UCES.pg.v.sum() + + # --- set load --- + self.ss.PQ.set(src='p0', attr='v', idx='PQ_1', value=0.1) + self.ss.UCES.update() + + self.ss.UCES.run(solver='SCIP') + pgs_pqt = self.ss.UCES.pg.v.sum() + self.assertLess(pgs_pqt, pgs, "Load set does not take effect!") + + # --- trip load --- + self.ss.PQ.alter(src='u', idx='PQ_2', value=0) + self.ss.UCES.update() + + self.ss.UCES.run(solver='SCIP') + pgs_pqt2 = self.ss.UCES.pg.v.sum() + self.assertLess(pgs_pqt2, pgs_pqt, "Load trip does not take effect!") + + @skip_unittest_without_MIP + def test_trip_line(self): + """ + Test line tripping. 
+ """ + self.ss.Line.set(src='u', attr='v', idx=3, value=0) + self.ss.UCES.update() + + self.ss.UCES.run(solver='SCIP') + self.assertTrue(self.ss.UCES.converged, "UCES did not converge under line trip!") + plf_l3 = self.ss.UCES.get(src='plf', attr='v', idx=3) + np.testing.assert_almost_equal(np.zeros_like(plf_l3), + plf_l3, decimal=6) + + self.ss.Line.alter(src='u', idx=3, value=1)