diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml index e0a72b1cd..6bcc81f6d 100644 --- a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -20,13 +20,16 @@ jobs: include: - name-suffix: "coverage" os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 - name-suffix: "basic" os: ubuntu-latest - python-version: 3.9 + python-version: "3.10" + - name-suffix: "basic" + os: ubuntu-latest + python-version: 3.11 - name-suffix: "basic" os: windows-latest - python-version: 3.8 + python-version: 3.9 steps: - name: Checkout repo @@ -38,7 +41,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Set up julia - if: runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage' + if: runner.os == 'Linux' uses: julia-actions/setup-julia@v1 with: version: "1.6" @@ -58,14 +61,20 @@ jobs: environment-file: eDisGo_env_dev.yml python-version: ${{ matrix.python-version }} - - name: Run tests - if: ${{ !(runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage') }} + - name: Run tests Linux + if: runner.os == 'Linux' && matrix.name-suffix != 'coverage' + run: | + python -m pip install pytest pytest-notebook + python -m pytest --runslow --runonlinux --disable-warnings --color=yes -v + + - name: Run tests Windows + if: runner.os == 'Windows' run: | python -m pip install pytest pytest-notebook python -m pytest --runslow --disable-warnings --color=yes -v - name: Run tests, coverage and send to coveralls - if: runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage' + if: runner.os == 'Linux' && matrix.python-version == 3.9 && matrix.name-suffix == 'coverage' run: | pip install pytest pytest-notebook coveralls coverage run --source=edisgo -m pytest --runslow --runonlinux --disable-warnings --color=yes -v diff --git a/doc/conf.py b/doc/conf.py index 851a3fb6d..30cafa8a5 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ 
-71,8 +71,6 @@ ] # Files to ignore when building api documentation autoapi_ignore = [ - "*/flex_opt/curtailment.py", - "*/flex_opt/storage_positioning.py", "*/opf/timeseries_reduction.py", "*/opf/opf_solutions/*", ] @@ -128,16 +126,23 @@ def setup(sphinx): "shapely.%s", ), "ding0": ("https://dingo.readthedocs.io/en/dev/api/ding0.html#%s", "ding0.%s"), - "pypsa": ("https://pypsa.readthedocs.io/en/latest/components.html#%s", "pypsa.%s"), + "pypsa": ( + "https://pypsa.readthedocs.io/en/latest/user-guide/components.html#%s", + "pypsa.%s", + ), "plotly": ( "https://plotly.com/python-api-reference/generated/%s.html", "plotly.%s", ), } # ignore the following external links when checking the links -# stackoverflow is listed here because for some reason the link check fails for these +# stackoverflow and gurobi are listed here because for some reason +# the link check fails for these # in the github action, even though the link is correct -linkcheck_ignore = [r"https://stackoverflow.com*"] +linkcheck_ignore = [ + r"https://stackoverflow.com*", + r"https://support.gurobi.com/*", +] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] diff --git a/doc/dev_notes.rst b/doc/dev_notes.rst index e012ce874..5d7764e48 100644 --- a/doc/dev_notes.rst +++ b/doc/dev_notes.rst @@ -18,13 +18,12 @@ Installation using Linux ~~~~~~~~~~~~~~~~~~~~~~~~ To set up a source installation using linux simply use a virtual environment and install -the source code with pip. Make sure to use python3.7 or higher (recommended -python3.8). **After** setting up your virtual environment and activating it run the +the source code with pip. Make sure to use python3.9 or higher. **After** setting up your virtual environment and activating it run the following commands within your eDisGo directory: .. 
code-block:: bash - python -m pip install -e .[full] # install eDisGo from source + python -m pip install -e .[dev] # install eDisGo from source pre-commit install # install pre-commit hooks diff --git a/doc/index.rst b/doc/index.rst index dfebb4369..444e35f04 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -15,7 +15,7 @@ The toolbox currently includes: * `ding0 `_ tool for synthetic medium and low voltage grid topologies for the whole of Germany - * `OpenEnergy DataBase (oedb) `_ for + * `OpenEnergy DataBase (oedb) `_ for feed-in time series of fluctuating renewables and scenarios for future power plant park of Germany * `demandlib `_ for electrical load time series diff --git a/doc/quickstart.rst b/doc/quickstart.rst index 4ce437c05..bc537cc44 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -3,11 +3,11 @@ Getting started ================ +.. warning:: Make sure to use python 3.9 or higher! + Installation using Linux ------------------------- -.. warning:: Make sure to use python 3.8 or higher! - Install latest eDisGo version through pip. Therefore, we highly recommend using a virtual environment and its pip. @@ -21,8 +21,6 @@ You may also consider installing a developer version as detailed in Installation using Windows -------------------------- -.. warning:: Make sure to use python 3.8 or higher! - For Windows users we recommend using Anaconda and to install the geo stack using the conda-forge channel prior to installing eDisGo. You may use the provided `eDisGo_env.yml file `_ @@ -44,117 +42,29 @@ Installation using MacOS We don't have any experience with our package on MacOS yet! If you try eDisGo on MacOS we would be happy if you let us know about your experience! -Requirements for edisgoOPF package ----------------------------------- - -.. warning:: The non-linear optimal power flow is currently not maintained and might not work out of the box! 
- -To use the multiperiod optimal power flow that is provided in the julia package -edisgoOPF in eDisGo you additionally need to install julia version 1.1.1. -Download julia from -`julia download page `_ and -add it to your path (see -`platform specific instructions `_ -for more information). - -Before using the edisgoOPF julia package for the first time you need to -instantiate it. Therefore, in a terminal change directory to the edisgoOPF -package located in eDisGo/edisgo/opf/edisgoOPF and call julia from there. -Change to package mode by typing - -.. code-block:: bash - - ] - -Then activate the package: - -.. code-block:: bash - - (v1.0) pkg> activate . - -And finally instantiate it: - -.. code-block:: bash - - (SomeProject) pkg> instantiate - -.. _prerequisites: - -Additional linear solver -^^^^^^^^^^^^^^^^^^^^^^^^^ - -As with the default linear solver in Ipopt (local solver used in the OPF) -the limit for prolem sizes is reached quite quickly, you may want to instead use -the solver HSL_MA97. -The steps required to set up HSL are also described in the -`Ipopt Documentation `_. -Here is a short version for reference: - -First, you need to obtain an academic license for HSL Solvers. -Under https://licences.stfc.ac.uk/product/coin-hsl download the sources for Coin-HSL Full (Stable). -You will need to provide an institutional e-mail to gain access. - -Unpack the tar.gz: +Additional requirements for Optimal Power Flow +--------------------------------------------------- -.. code-block:: bash - - tar -xvzf coinhsl-2014.01.10.tar.gz - -To install the solver, clone the Ipopt Third Party HSL tools: - -.. code-block:: bash - - git clone https://github.com/coin-or-tools/ThirdParty-HSL.git - cd ThirdParty-HSL - - -Under `ThirdParty-HSL`, create a folder for the HSL sources named `coinhsl` and -copy the contents of the HSL archive into it. -Under Ubuntu, you'll need BLAS, LAPACK and GCC for Fortran. If you don't have them, install them via: - -.. 
code-block:: bash - - sudo apt-get install libblas-dev liblapack-dev gfortran +In order to use the optimal power flow, you additionally need: -You can then configure and install your HSL Solvers: +1. **Julia**: Version 1.6.7 is required. +2. **Gurobi Optimizer**: A powerful optimization solver. -.. code-block:: bash - - ./configure - make - sudo make install - -To make Ipopt pick up the solver, you need to add it to your path. -During install, there will be an output that tells you where the libraries have -been put. Usually like this: +Installation Steps +^^^^^^^^^^^^^^^^^^^ -.. code-block:: bash - - Libraries have been installed in: - /usr/local/lib +1. Install Julia 1.6.7 -Add this path to the variable `LD_LIBRARY_PATH`: +Download Julia 1.6.7 from the `Julia LTS releases page `_. -.. code-block:: bash - - export LD_LIBRARY="/usr/local/bin":$LD_LIBRARY_PATH - -You might also want to add this to your .bashrc to make it persistent. - -For some reason, Ipopt looks for a library named `libhsl.so`, which is not what -the file is named, so we'll also need to provide a symlink: - -.. code-block:: bash +Install Julia by following the instructions in the `Julia installation guide `_. Make sure to add Julia to your system path. - cd /usr/local/lib - ln -s libcoinhsl.so libhsl.so -MA97 should now work and can be called from Julia with: +2. Install Gurobi -.. code-block:: julia - JuMP.setsolver(pm.model,IpoptSolver(linear_solver="ma97")) +Follow the `Gurobi installation guide `_ to install Gurobi and add it to your system path. Prerequisites ------------- @@ -188,8 +98,8 @@ Aside from grid topology data you may eventually need a dataset on future installation of power plants. You may therefore use the scenarios developed in the `open_eGo `_ project that are available in the -`OpenEnergy DataBase (oedb) `_ -hosted on the `OpenEnergy Platform (OEP) `_. +`OpenEnergy DataBase (oedb) `_ +hosted on the `OpenEnergy Platform (OEP) `_. 
eDisGo provides an interface to the oedb using the package `ego.io `_. ego.io gives you a python SQL-Alchemy representations of the oedb and access to it by using the diff --git a/doc/usage_details.rst b/doc/usage_details.rst index aac09982f..a35ba1e20 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -241,7 +241,7 @@ This mode can be invoked as follows: For the following components you can use existing time series: * Fluctuating generators: Feed-in time series for solar and wind power plants can be - retrieved from the `OpenEnergy DataBase `_. + retrieved from the `OpenEnergy DataBase `_. * Conventional loads: Standard load profiles for the different sectors residential, commercial, agricultural and industrial are generated using the oemof `demandlib `_. diff --git a/eDisGo_env.yml b/eDisGo_env.yml index 45b797e16..c9f4e6239 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.8, < 3.10 + - python >= 3.9, <= 3.11 - pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona @@ -16,6 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 - pip: - eDisGo diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index ae86632ac..859e39546 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.8, < 3.10 + - python >= 3.9, <= 3.11 - pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona @@ -16,6 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 - pip: - -e .[dev] diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index dca1660b2..3930a5659 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -417,7 +417,7 @@ def set_time_series_active_power_predefined( Technology- and weather 
cell-specific hourly feed-in time series are obtained from the `OpenEnergy DataBase - `_. See + `_. See :func:`edisgo.io.timeseries_import.feedin_oedb` for more information. This option requires that the parameter `engine` is provided in case @@ -478,7 +478,7 @@ def set_time_series_active_power_predefined( Sets active power demand time series using individual hourly electricity load time series for one year obtained from the `OpenEnergy DataBase - `_. + `_. This option requires that the parameters `engine` and `scenario` are provided. For further settings, the parameter `timeindex` can also be @@ -933,7 +933,7 @@ def import_generators(self, generator_scenario=None, **kwargs): Gets generator park for specified scenario and integrates generators into grid. The generator data is retrieved from the - `open energy platform `_. Decommissioned + `open energy platform `_. Decommissioned generators are removed from the grid, generators with changed capacity updated and new generators newly integrated into the grid. @@ -1003,7 +1003,7 @@ def analyze( Conducts a static, non-linear power flow analysis. Conducts a static, non-linear power flow analysis using - `PyPSA `_ and writes results (active, reactive and apparent power as well as current on lines and voltages at buses) to :class:`~.network.results.Results` @@ -1928,7 +1928,7 @@ def import_electromobility( Imports electromobility data and integrates charging points into grid. Electromobility data can be obtained from the `OpenEnergy DataBase - `_ or from self-provided + `_ or from self-provided data. In case you want to use self-provided data, it needs to be generated using the tools `SimBEV `_ (required version: @@ -1960,7 +1960,7 @@ def import_electromobility( * "oedb" Electromobility data is obtained from the `OpenEnergy DataBase - `_. + `_. This option requires that the parameters `scenario` and `engine` are provided. 
@@ -2143,7 +2143,7 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None) between two scenarios: 'eGon2035' and 'eGon100RE'. The data is retrieved from the - `open energy platform `_. + `open energy platform `_. # ToDo Add information on scenarios and from which tables data is retrieved. @@ -2305,7 +2305,7 @@ def apply_heat_pump_operating_strategy( def import_dsm(self, scenario: str, engine: Engine, timeindex=None): """ Gets industrial and CTS DSM profiles from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Profiles comprise minimum and maximum load increase in MW as well as maximum energy pre- and postponing in MWh. The data is written to the @@ -2356,7 +2356,7 @@ def import_home_batteries( between two scenarios: 'eGon2035' and 'eGon100RE'. The data is retrieved from the - `open energy platform `_. + `open energy platform `_. The batteries are integrated into the grid (added to :attr:`~.network.topology.Topology.storage_units_df`) based on their building diff --git a/edisgo/flex_opt/costs.py b/edisgo/flex_opt/costs.py index 8bd63d9b7..7cbe359b1 100644 --- a/edisgo/flex_opt/costs.py +++ b/edisgo/flex_opt/costs.py @@ -1,3 +1,4 @@ +import logging import os import pandas as pd @@ -7,6 +8,8 @@ from edisgo.tools.geo import proj2equidistant +logger = logging.getLogger(__name__) + def grid_expansion_costs(edisgo_obj, without_generator_import=False): """ @@ -67,7 +70,8 @@ def _get_transformer_costs(trafos): costs_trafos = pd.DataFrame( { "costs_transformers": len(hvmv_trafos) - * [float(edisgo_obj.config["costs_transformers"]["mv"])] + * [float(edisgo_obj.config["costs_transformers"]["mv"])], + "voltage_level": len(hvmv_trafos) * ["hv/mv"], }, index=hvmv_trafos, ) @@ -77,13 +81,14 @@ def _get_transformer_costs(trafos): pd.DataFrame( { "costs_transformers": len(mvlv_trafos) - * [float(edisgo_obj.config["costs_transformers"]["lv"])] + * [float(edisgo_obj.config["costs_transformers"]["lv"])], + "voltage_level": len(mvlv_trafos) * 
["mv/lv"], }, index=mvlv_trafos, ), ] ) - return costs_trafos.loc[trafos.index, "costs_transformers"].values + return costs_trafos.loc[trafos.index, :] def _get_line_costs(lines_added): costs_lines = line_expansion_costs(edisgo_obj, lines_added.index) @@ -107,9 +112,8 @@ def _get_line_costs(lines_added): # costs for transformers if not equipment_changes.empty: transformers = equipment_changes[ - equipment_changes.index.isin( - [f"{_}_station" for _ in edisgo_obj.topology._grids_repr] - ) + equipment_changes.equipment.str.contains("Transformer") + | equipment_changes.equipment.str.contains("transformer") ] added_transformers = transformers[transformers["change"] == "added"] removed_transformers = transformers[transformers["change"] == "removed"] @@ -129,15 +133,16 @@ def _get_line_costs(lines_added): ) trafos = all_trafos.loc[added_transformers["equipment"]] # calculate costs for each transformer + transformer_costs = _get_transformer_costs(trafos) costs = pd.concat( [ costs, pd.DataFrame( { "type": trafos.type_info.values, - "total_costs": _get_transformer_costs(trafos), + "total_costs": transformer_costs.costs_transformers, "quantity": len(trafos) * [1], - "voltage_level": len(trafos) * ["mv/lv"], + "voltage_level": transformer_costs.voltage_level, }, index=trafos.index, ), @@ -161,6 +166,19 @@ def _get_line_costs(lines_added): .sum() .loc[lines_added_unique, ["quantity"]] ) + # use the minimum of quantity and num_parallel, as sometimes lines are added + # and in a next reinforcement step removed again, e.g. 
when feeder is split + # at 2/3 and a new single standard line is added + lines_added = pd.merge( + lines_added, + edisgo_obj.topology.lines_df.loc[:, ["num_parallel"]], + how="left", + left_index=True, + right_index=True, + ) + lines_added["quantity_added"] = lines_added.loc[ + :, ["quantity", "num_parallel"] + ].min(axis=1) lines_added["length"] = edisgo_obj.topology.lines_df.loc[ lines_added.index, "length" ] @@ -176,9 +194,9 @@ def _get_line_costs(lines_added): ].values, "total_costs": line_costs.costs.values, "length": ( - lines_added.quantity * lines_added.length + lines_added.quantity_added * lines_added.length ).values, - "quantity": lines_added.quantity.values, + "quantity": lines_added.quantity_added.values, "voltage_level": line_costs.voltage_level.values, }, index=lines_added.index, @@ -288,3 +306,69 @@ def line_expansion_costs(edisgo_obj, lines_names=None): ] ) return costs_lines.loc[lines_df.index] + + +def transformer_expansion_costs(edisgo_obj, transformer_names=None): + """ + Returns costs per transformer in kEUR as well as voltage level they are in. + + Parameters + ----------- + edisgo_obj : :class:`~.EDisGo` + eDisGo object + transformer_names: None or list(str) + List of names of transformers to return cost information for. If None, it is + returned for all transformers in + :attr:`~.network.topology.Topology.transformers_df` and + :attr:`~.network.topology.Topology.transformers_hvmv_df`. + + Returns + ------- + costs: :pandas:`pandas.DataFrame` + Dataframe with names of transformers in index and columns 'costs' with + costs per transformer in kEUR and 'voltage_level' with information on voltage + level the transformer is in. 
+ + """ + transformers_df = pd.concat( + [ + edisgo_obj.topology.transformers_df.copy(), + edisgo_obj.topology.transformers_hvmv_df.copy(), + ] + ) + if transformer_names is not None: + transformers_df = transformers_df.loc[transformer_names, ["type_info"]] + + if len(transformers_df) == 0: + return pd.DataFrame(columns=["costs", "voltage_level"]) + + hvmv_transformers = transformers_df[ + transformers_df.index.isin(edisgo_obj.topology.transformers_hvmv_df.index) + ].index + mvlv_transformers = transformers_df[ + transformers_df.index.isin(edisgo_obj.topology.transformers_df.index) + ].index + + costs_hvmv = float(edisgo_obj.config["costs_transformers"]["mv"]) + costs_mvlv = float(edisgo_obj.config["costs_transformers"]["lv"]) + + costs_df = pd.DataFrame( + { + "costs": costs_hvmv, + "voltage_level": "hv/mv", + }, + index=hvmv_transformers, + ) + costs_df = pd.concat( + [ + costs_df, + pd.DataFrame( + { + "costs": costs_mvlv, + "voltage_level": "mv/lv", + }, + index=mvlv_transformers, + ), + ] + ) + return costs_df diff --git a/edisgo/flex_opt/curtailment.py b/edisgo/flex_opt/curtailment.py deleted file mode 100644 index 484df444d..000000000 --- a/edisgo/flex_opt/curtailment.py +++ /dev/null @@ -1,782 +0,0 @@ -import logging - -import pandas as pd - -from pyomo.environ import ( - ConcreteModel, - Constraint, - Objective, - Param, - Set, - Var, - minimize, -) -from pyomo.opt import SolverFactory - -from edisgo.io import pypsa_io - - -def voltage_based( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'voltage-based'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated depending - on the exceedance of the allowed voltage deviation at the nodes of the - generators. The higher the exceedance, the higher the curtailment. 
- - The optional parameter `voltage_threshold` specifies the threshold for the - exceedance of the allowed voltage deviation above which a generator is - curtailed. By default it is set to zero, meaning that all generators at - nodes with voltage deviations that exceed the allowed voltage deviation are - curtailed. Generators at nodes where the allowed voltage deviation is not - exceeded are not curtailed. In the case that the required curtailment - exceeds the weather-dependent availability of all generators with voltage - deviations above the specified threshold, the voltage threshold is lowered - in steps of 0.01 p.u. until the curtailment target can be met. - - Above the threshold, the curtailment is proportional to the exceedance of - the allowed voltage deviation. In order to find the linear relation between - the curtailment and the voltage difference a linear problem is formulated - and solved using the python package pyomo. See documentation for further - information. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - edisgo : :class:`~.edisgo.EDisGo` - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. 
- voltage_threshold: :obj:`float` - The node voltage below which no curtailment is assigned to the - respective generator if not necessary. Default: 0.0. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generator. - Possible options are: - - * 'cbc' - coin-or branch and cut solver - * 'glpk' - gnu linear programming kit solver - * any other available compatible with 'pyomo' like 'gurobi' - or 'cplex' - - Default: 'cbc' - - """ - - raise NotImplementedError - - voltage_threshold = pd.Series( - kwargs.get("voltage_threshold", 0.0), - index=curtailment_timeseries.index, - ) - solver = kwargs.get("solver", "cbc") - combined_analysis = kwargs.get("combined_analysis", False) - - # get the voltages at the generators - if not edisgo.network.pypsa.edisgo_mode: - voltages_lv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "lv")].index, - level="lv", - ) - else: - # if only MV topology was analyzed (edisgo_mode = 'mv') all LV - # generators are assigned the voltage at the corresponding station's - # primary side - lv_gens = generators[generators.voltage_level == "lv"] - voltages_lv_stations = edisgo.network.results.v_res( - nodes_df=[_.station for _ in lv_gens.grid.unique()], level="mv" - ) - voltages_lv_gens = pd.DataFrame() - for lv_gen in lv_gens.index: - voltages_lv_gens[repr(lv_gen)] = voltages_lv_stations[ - repr(lv_gen.grid.station) - ] - voltages_mv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "mv")].index, - level="mv", - ) - voltages_gens = voltages_lv_gens.join(voltages_mv_gens) - - # get allowed voltage deviations from config - if not combined_analysis: - allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["lv_feed-in_case_max_v_deviation"] - else: - 
allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - - # assign allowed voltage deviation to each generator - if not edisgo.network.pypsa.edisgo_mode: - # for edisgo_mode = None - - # get voltages at stations - grids = list(set(generators.grid)) - lv_stations = [_.station for _ in grids if "LVStation" in repr(_.station)] - voltage_lv_stations = edisgo.network.results.v_res( - nodes_df=lv_stations, level="lv" - ) - voltages_mv_station = edisgo.network.results.v_res( - nodes_df=[edisgo.network.mv_grid.station], level="mv" - ) - voltages_stations = voltage_lv_stations.join(voltages_mv_station) - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = generators.voltage_level.apply( - lambda _: allowed_voltage_diff_lv if _ == "lv" else allowed_voltage_dev_mv - ) - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in voltages_gens.columns: - station = generators[generators.gen_repr == gen].grid.values[0].station - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_stations.loc[:, repr(station)] - - generators[generators.gen_repr == gen].allowed_voltage_dev.iloc[0] - ) - - else: - # for edisgo_mode = 'mv' - - station = edisgo.network.mv_grid.station - # get voltages at HV/MV station - voltages_station = edisgo.network.results.v_res(nodes_df=[station], level="mv") - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = allowed_voltage_dev_mv - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in voltages_gens.columns: - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_station.loc[:, repr(station)] - - generators[generators.gen_repr == 
gen].allowed_voltage_dev.iloc[0] - ) - - # for every time step check if curtailment can be fulfilled, otherwise - # reduce voltage threshold; set feed-in of generators below voltage - # threshold to zero, so that they cannot be curtailed - for ts in curtailment_timeseries.index: - # get generators with voltage higher than threshold - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # if curtailment cannot be fulfilled lower voltage threshold - while sum(feedin.loc[ts, gen_pool]) < curtailment_timeseries.loc[ts]: - voltage_threshold.loc[ts] = voltage_threshold.loc[ts] - 0.01 - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # set feed-in of generators below voltage threshold to zero, so that - # they cannot be curtailed - gen_pool_out = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] <= voltage_threshold.loc[ts] - ].index - feedin.loc[ts, gen_pool_out] = 0 - - # only optimize for time steps where curtailment is greater than zero - timeindex = curtailment_timeseries[curtailment_timeseries > 0].index - if not timeindex.empty: - curtailment = _optimize_voltage_based_curtailment( - feedin, - voltage_gens_diff, - curtailment_timeseries, - voltage_threshold, - timeindex, - solver, - ) - else: - curtailment = pd.DataFrame() - - # set curtailment for other time steps to zero - curtailment = pd.concat( - [ - curtailment, - pd.DataFrame( - 0, - columns=feedin.columns, - index=curtailment_timeseries[curtailment_timeseries <= 0].index, - ), - ] - ) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _optimize_voltage_based_curtailment( - feedin, voltage_pu, total_curtailment, voltage_threshold, timeindex, solver -): - """ - Formulates and solves 
linear problem to find linear relation between - curtailment and node voltage. - - ToDo: adapt to refactored code! - - Parameters - ------------ - feedin : :pandas:`pandas.DataFrame` - See `feedin` parameter in - :func:`edisgo.flex_opt.curtailment.voltage_based` for more information. - voltage_pu : :pandas:`pandas.DataFrame - Dataframe containing voltages in p.u. at the generator nodes. Index - of the dataframe is a :pandas:`pandas.DatetimeIndex`, - columns are the generator representatives. - total_curtailment : :pandas:`pandas.Series` - Series containing the specific curtailment in kW to be allocated to the - generators. The index is a - :pandas:`pandas.DatetimeIndex`. - voltage_threshold : :pandas:`pandas.Series` - Series containing the voltage thresholds in p.u. below which no - generator curtailment will occur. The index is a - :pandas:`pandas.DatetimeIndex`. - solver : :obj:`str` - The solver used to optimize the linear problem. Default: 'cbc'. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - feed-in was provided for in `feedin` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. 
- - """ - - raise NotImplementedError - - logging.debug("Start curtailment optimization.") - - v_max = voltage_pu.max(axis=1) - generators = feedin.columns - - # additional curtailment factors - cf_add = pd.DataFrame(index=timeindex) - for gen in generators: - cf_add[gen] = abs( - (voltage_pu.loc[timeindex, gen] - v_max[timeindex]) - / (voltage_threshold[timeindex] - v_max[timeindex]) - ) - - # curtailment factors - cf = pd.DataFrame(index=timeindex) - for gen in generators: - cf[gen] = abs( - (voltage_pu.loc[timeindex, gen] - voltage_threshold[timeindex]) - / (v_max[timeindex] - voltage_threshold[timeindex]) - ) - - # initialize model - model = ConcreteModel() - - # add sets - model.T = Set(initialize=timeindex) - model.G = Set(initialize=generators) - - # add parameters - def feedin_init(model, t, g): - return feedin.loc[t, g] - - model.feedin = Param(model.T, model.G, initialize=feedin_init) - - def voltage_pu_init(model, t, g): - return voltage_pu.loc[t, g] - - model.voltage_pu = Param(model.T, model.G, initialize=voltage_pu_init) - - def cf_add_init(model, t, g): - return cf_add.loc[t, g] - - model.cf_add = Param(model.T, model.G, initialize=cf_add_init) - - def cf_init(model, t, g): - return cf.loc[t, g] - - model.cf = Param(model.T, model.G, initialize=cf_init) - - def total_curtailment_init(model, t): - return total_curtailment.loc[t] - - model.total_curtailment = Param(model.T, initialize=total_curtailment_init) - - # add variables - model.offset = Var(model.T, bounds=(0, 1)) - model.cf_max = Var(model.T, bounds=(0, 1)) - - def curtailment_init(model, t, g): - return (0, feedin.loc[t, g]) - - model.c = Var(model.T, model.G, bounds=curtailment_init) - - # add objective - def obj_rule(model): - expr = sum(model.offset[t] * 100 for t in model.T) - return expr - - model.obj = Objective(rule=obj_rule, sense=minimize) - - # add constraints - # curtailment per generator constraints - def curtail(model, t, g): - return ( - model.cf[t, g] * model.cf_max[t] * 
model.feedin[t, g] - + model.cf_add[t, g] * model.offset[t] * model.feedin[t, g] - - model.c[t, g] - == 0 - ) - - model.curtailment = Constraint(model.T, model.G, rule=curtail) - - # total curtailment constraint - def total_curtailment(model, t): - return sum(model.c[t, g] for g in model.G) == model.total_curtailment[t] - - model.sum_curtailment = Constraint(model.T, rule=total_curtailment) - - # solve - solver = SolverFactory(solver) - results = solver.solve(model, tee=False) - - # load results back into model - model.solutions.load_from(results) - - return pd.DataFrame( - {g: [model.c[t, g].value for t in model.T] for g in model.G}, - index=model.T, - ) - - -def feedin_proportional( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'feedin-proportional'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. 
- edisgo : :class:`~.edisgo.EDisGo` - curtailment_key::obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - - """ - raise NotImplementedError - - # calculate curtailment in each time step of each generator - curtailment = feedin.divide(feedin.sum(axis=1), axis=0).multiply( - curtailment_timeseries, axis=0 - ) - - # substitute NaNs from division with 0 by 0 - curtailment.fillna(0, inplace=True) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _check_curtailment_target(curtailment, curtailment_target, curtailment_key): - """ - Raises an error if curtailment target was not met in any time step. - - ToDo: adapt to refactored code! - - Parameters - ----------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step. - Index is a :pandas:`pandas.DatetimeIndex`, columns are - the generator representatives. - curtailment_target : :pandas:`pandas.Series` - The curtailment in kW that was to be distributed amongst the - generators. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment was specified for. - - """ - raise NotImplementedError - - if not (abs(curtailment.sum(axis=1) - curtailment_target) < 1e-1).all(): - message = "Curtailment target not met for {}.".format(curtailment_key) - logging.error(message) - raise TypeError(message) - - -def _assign_curtailment(curtailment, edisgo, generators, curtailment_key): - """ - Helper function to write curtailment time series to generator objects. - - ToDo: adapt to refactored code! 
- - This function also writes a list of the curtailed generators to curtailment - in :class:`edisgo.network.network.TimeSeries` and - :class:`edisgo.network.network.Results`. - - Parameters - ---------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - for all generators of the type (and in weather cell) specified in - `curtailment_key` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. - edisgo : :class:`~.edisgo.EDisGo` - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - - """ - raise NotImplementedError - - gen_object_list = [] - for gen in curtailment.columns: - # get generator object from representative - gen_object = generators.loc[generators.gen_repr == gen].index[0] - # assign curtailment to individual generators - gen_object.curtailment = curtailment.loc[:, gen] - gen_object_list.append(gen_object) - - # set timeseries.curtailment - if edisgo.network.timeseries._curtailment: - edisgo.network.timeseries._curtailment.extend(gen_object_list) - edisgo.network.results._curtailment[curtailment_key] = gen_object_list - else: - edisgo.network.timeseries._curtailment = gen_object_list - # list needs to be copied, otherwise it will be extended every time - # a new key is added to results._curtailment - edisgo.network.results._curtailment = {curtailment_key: gen_object_list.copy()} - - -class CurtailmentControl: - """ - Allocates given curtailment targets to solar and wind generators. - - ToDo: adapt to refactored code! 
- - Parameters - ---------- - edisgo: :class:`edisgo.EDisGo` - The parent EDisGo object that this instance is a part of. - methodology : :obj:`str` - Defines the curtailment strategy. Possible options are: - - * 'feedin-proportional' - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. For more information see - :func:`edisgo.flex_opt.curtailment.feedin_proportional`. - * 'voltage-based' - The curtailment that has to be met in each time step is allocated - based on the voltages at the generator connection points and a - defined voltage threshold. Generators at higher voltages - are curtailed more. The default voltage threshold is 1.0 but - can be changed by providing the argument 'voltage_threshold'. This - method formulates the allocation of curtailment as a linear - optimization problem using :py:mod:`Pyomo` and requires a linear - programming solver like coin-or cbc (cbc) or gnu linear programming - kit (glpk). The solver can be specified through the parameter - 'solver'. For more information see - :func:`edisgo.flex_opt.curtailment.voltage_based`. - - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame`, optional - Series or DataFrame containing the curtailment time series in kW. Index - needs to be a :pandas:`pandas.DatetimeIndex`. - Provide a Series if the curtailment time series applies to wind and - solar generators. Provide a DataFrame if the curtailment time series - applies to a specific technology and optionally weather cell. In the - first case columns of the DataFrame are e.g. 'solar' and 'wind'; in the - second case columns need to be a - :pandas:`pandas.MultiIndex` with the first level containing - the type and the second level the weather cell ID. Default: None. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generators - when 'voltage-based' curtailment methodology is chosen. 
- Possible options are: - - * 'cbc' - * 'glpk' - * any other available solver compatible with 'pyomo' such as 'gurobi' - or 'cplex' - - Default: 'cbc'. - voltage_threshold : :obj:`float` - Voltage below which no curtailment is assigned to the respective - generator if not necessary when 'voltage-based' curtailment methodology - is chosen. See :func:`edisgo.flex_opt.curtailment.voltage_based` for - more information. Default: 1.0. - mode : :obj:`str` - The `mode` is only relevant for curtailment method 'voltage-based'. - Possible options are None and 'mv'. Per default `mode` is None in which - case a power flow is conducted for both the MV and LV. In case `mode` - is set to 'mv' components in underlying LV grids are considered - aggregative. Default: None. - - """ - - # ToDo move some properties from topology here (e.g. peak_load, generators,...) - def __init__( - self, edisgo, methodology, curtailment_timeseries, mode=None, **kwargs - ): - raise NotImplementedError - - logging.info("Start curtailment methodology {}.".format(methodology)) - - self._check_timeindex(curtailment_timeseries, edisgo.topology) - - if methodology == "feedin-proportional": - curtailment_method = feedin_proportional - elif methodology == "voltage-based": - curtailment_method = voltage_based - else: - raise ValueError( - "{} is not a valid curtailment methodology.".format(methodology) - ) - - # check if provided mode is valid - if mode and mode != "mv": - raise ValueError("Provided mode {} is not a valid mode.") - - # get all fluctuating generators and their attributes (weather ID, - # type, etc.) 
- # TODO: Function get_gen_info does not exist - generators = get_gen_info( # noqa: F821 - edisgo.topology, "mvlv", fluctuating=True - ) - - # do analyze to get all voltages at generators and feed-in dataframe - edisgo.analyze(mode=mode) - - # get feed-in time series of all generators - if not mode: - feedin = edisgo.topology.pypsa.generators_t.p * 1000 - # drop dispatchable generators and slack generator - drop_labels = [ - _ for _ in feedin.columns if "GeneratorFluctuating" not in _ - ] + ["Generator_slack"] - else: - feedin = edisgo.topology.mv_grid.generators_timeseries() - for grid in edisgo.topology.mv_grid.lv_grids: - feedin = pd.concat([feedin, grid.generators_timeseries()], axis=1) - feedin.rename(columns=lambda _: repr(_), inplace=True) - # drop dispatchable generators - drop_labels = [_ for _ in feedin.columns if "GeneratorFluctuating" not in _] - feedin.drop(labels=drop_labels, axis=1, inplace=True) - - if isinstance(curtailment_timeseries, pd.Series): - # check if curtailment exceeds feed-in - self._precheck(curtailment_timeseries, feedin, "all_fluctuating_generators") - - # do curtailment - curtailment_method( - feedin, - generators, - curtailment_timeseries, - edisgo, - "all_fluctuating_generators", - **kwargs - ) - - elif isinstance(curtailment_timeseries, pd.DataFrame): - for col in curtailment_timeseries.columns: - logging.debug("Calculating curtailment for {}".format(col)) - - # filter generators - if isinstance(curtailment_timeseries.columns, pd.MultiIndex): - selected_generators = generators.loc[ - (generators.type == col[0]) - & (generators.weather_cell_id == col[1]) - ] - else: - selected_generators = generators.loc[(generators.type == col)] - - # check if curtailment exceeds feed-in - feedin_selected_generators = feedin.loc[ - :, selected_generators.gen_repr.values - ] - self._precheck( - curtailment_timeseries.loc[:, col], - feedin_selected_generators, - col, - ) - - # do curtailment - if not feedin_selected_generators.empty: - 
curtailment_method( - feedin_selected_generators, - selected_generators, - curtailment_timeseries.loc[:, col], - edisgo, - col, - **kwargs - ) - - # check if curtailment exceeds feed-in - self._postcheck(edisgo.topology, feedin) - - # update generator time series in pypsa topology - if edisgo.topology.pypsa is not None: - pypsa_io.update_pypsa_generator_timeseries(edisgo.topology) - - # add measure to Results object - edisgo.results.measures = "curtailment" - - def _check_timeindex(self, curtailment_timeseries, network): - """ - Raises an error if time index of curtailment time series does not - comply with the time index of load and feed-in time series. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame` - See parameter `curtailment_timeseries` in class definition for more - information. - - """ - raise NotImplementedError - - if curtailment_timeseries is None: - message = "No curtailment given." - logging.error(message) - raise KeyError(message) - try: - curtailment_timeseries.loc[network.timeseries.timeindex] - except Exception: - message = ( - "Time index of curtailment time series does not match " - "with load and feed-in time series." - ) - logging.error(message) - raise KeyError(message) - - def _precheck(self, curtailment_timeseries, feedin_df, curtailment_key): - """ - Raises an error if the curtailment at any time step exceeds the - total feed-in of all generators curtailment can be distributed among - at that time. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` - Curtailment time series in kW for the technology (and weather - cell) specified in `curtailment_key`. - feedin_df : :pandas:`pandas.Series` - Feed-in time series in kW for all generators of type (and in - weather cell) specified in `curtailment_key`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - Technology (and weather cell) curtailment is given for. 
- - """ - raise NotImplementedError - - if not feedin_df.empty: - feedin_selected_sum = feedin_df.sum(axis=1) - diff = feedin_selected_sum - curtailment_timeseries - # add tolerance (set small negative values to zero) - diff[diff.between(-1, 0)] = 0 - if not (diff >= 0).all(): - bad_time_steps = [_ for _ in diff.index if diff[_] < 0] - message = ( - "Curtailment demand exceeds total feed-in in time " - "steps {}.".format(bad_time_steps) - ) - logging.error(message) - raise ValueError(message) - else: - bad_time_steps = [ - _ for _ in curtailment_timeseries.index if curtailment_timeseries[_] > 0 - ] - if bad_time_steps: - message = ( - "Curtailment given for time steps {} but there " - "are no generators to meet the curtailment target " - "for {}.".format(bad_time_steps, curtailment_key) - ) - logging.error(message) - raise ValueError(message) - - def _postcheck(self, network, feedin): - """ - Raises an error if the curtailment of a generator exceeds the - feed-in of that generator at any time step. - - Parameters - ----------- - network : :class:`~.network.topology.Topology` - feedin : :pandas:`pandas.DataFrame` - DataFrame with feed-in time series in kW. Columns of the dataframe - are :class:`~.network.components.GeneratorFluctuating`, index is - time index. - - """ - raise NotImplementedError - - curtailment = network.timeseries.curtailment - gen_repr = [repr(_) for _ in curtailment.columns] - feedin_repr = feedin.loc[:, gen_repr] - curtailment_repr = curtailment - curtailment_repr.columns = gen_repr - if not ((feedin_repr - curtailment_repr) > -1e-1).all().all(): - message = "Curtailment exceeds feed-in." 
- logging.error(message) - raise TypeError(message) diff --git a/edisgo/flex_opt/storage_positioning.py b/edisgo/flex_opt/storage_positioning.py deleted file mode 100644 index b0a7015da..000000000 --- a/edisgo/flex_opt/storage_positioning.py +++ /dev/null @@ -1,707 +0,0 @@ -import logging - -from math import ceil, sqrt - -import networkx as nx -import numpy as np -import pandas as pd - -from networkx.algorithms.shortest_paths.weighted import ( - _dijkstra as dijkstra_shortest_path_length, -) - -from edisgo.flex_opt import check_tech_constraints, costs -from edisgo.tools import plots, tools - -logger = logging.getLogger(__name__) - - -def one_storage_per_feeder( - edisgo, storage_timeseries, storage_nominal_power=None, **kwargs -): - """ - Allocates the given storage capacity to multiple smaller storages. - - ToDo: adapt to refactored code! - - For each feeder with load or voltage issues it is checked if integrating a - storage will reduce peaks in the feeder, starting with the feeder with - the highest theoretical network expansion costs. A heuristic approach is used - to estimate storage sizing and siting while storage operation is carried - over from the given storage operation. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - storage_timeseries : :pandas:`pandas.DataFrame` - Total active and reactive power time series that will be allocated to - the smaller storages in feeders with load or voltage issues. Columns of - the dataframe are 'p' containing active power time series in kW and 'q' - containing the reactive power time series in kvar. Index is a - :pandas:`pandas.DatetimeIndex`. - storage_nominal_power : :obj:`float` or None - Nominal power in kW that will be allocated to the smaller storages in - feeders with load or voltage issues. If no nominal power is provided - the maximum active power given in `storage_timeseries` is used. - Default: None. 
- debug : :obj:`Boolean`, optional - If dedug is True a dataframe with storage size and path to storage of - all installed and possibly discarded storages is saved to a csv file - and a plot with all storage positions is created and saved, both to the - current working directory with filename `storage_results_{MVgrid_id}`. - Default: False. - check_costs_reduction : :obj:`Boolean` or :obj:`str`, optional - This parameter specifies when and whether it should be checked if a - storage reduced network expansion costs or not. It can be used as a safety - check but can be quite time consuming. Possible options are: - - * 'each_feeder' - Costs reduction is checked for each feeder. If the storage did not - reduce network expansion costs it is discarded. - * 'once' - Costs reduction is checked after the total storage capacity is - allocated to the feeders. If the storages did not reduce network - expansion costs they are all discarded. - * False - Costs reduction is never checked. - - Default: False. - - """ - - def _feeder_ranking(grid_expansion_costs): - """ - Get feeder ranking from network expansion costs DataFrame. - - MV feeders are ranked descending by network expansion costs that are - attributed to that feeder. - - Parameters - ---------- - grid_expansion_costs : :pandas:`pandas.DataFrame` - grid_expansion_costs DataFrame from :class:`~.network.network.Results` - of the copied edisgo object. - - Returns - ------- - :pandas:`pandas.Series` - Series with ranked MV feeders (in the copied graph) of type - :class:`~.network.components.Line`. Feeders are ranked by total network - expansion costs of all measures conducted in the feeder. The - feeder with the highest costs is in the first row and the feeder - with the lowest costs in the last row. 
- - """ - return ( - grid_expansion_costs.groupby(["mv_feeder"], sort=False) - .sum() - .reset_index() - .sort_values(by=["total_costs"], ascending=False)["mv_feeder"] - ) - - def _shortest_path(node): - # TODO: LVStation class is not used anymore - # resolve this when storage positioning is refactored - if isinstance(node, LVStation): # noqa: F821 - return len(nx.shortest_path(node.mv_grid.graph, node.mv_grid.station, node)) - else: - return len(nx.shortest_path(node.grid.graph, node.grid.station, node)) - - def _find_battery_node(edisgo, critical_lines_feeder, critical_nodes_feeder): - """ - Evaluates where to install the storage. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - critical_lines_feeder : :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. - critical_nodes_feeder : :obj:`list` - List with all nodes in MV feeder with voltage issues. - - Returns - ------- - :obj:`float` - Node where storage is installed. 
- - """ - - # if there are overloaded lines in the MV feeder the battery storage - # will be installed at the node farthest away from the MV station - if not critical_lines_feeder.empty: - logger.debug("Storage positioning due to overload.") - # dictionary with nodes and their corresponding path length to - # MV station - path_length_dict = {} - for line in critical_lines_feeder.index: - nodes = line.grid.graph.nodes_from_line(line) - for node in nodes: - path_length_dict[node] = _shortest_path(node) - # return node farthest away - return [ - _ - for _ in path_length_dict - if path_length_dict[_] == max(path_length_dict.values()) - ][0] - - # if there are voltage issues in the MV network the battery storage will - # be installed at the first node in path that exceeds 2/3 of the line - # length from station to critical node with highest voltage deviation - if critical_nodes_feeder: - logger.debug("Storage positioning due to voltage issues.") - node = critical_nodes_feeder[0] - - # get path length from station to critical node - get_weight = lambda u, v, data: data["line"].length # noqa: E731 - path_length = dijkstra_shortest_path_length( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - get_weight, - target=node, - ) - - # find first node in path that exceeds 2/3 of the line length - # from station to critical node farthest away from the station - path = nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - node, - ) - return next(j for j in path if path_length[j] >= path_length[node] * 2 / 3) - - return None - - def _calc_storage_size(edisgo, feeder, max_storage_size): - """ - Calculates storage size that reduces residual load. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. 
- - Returns - ------- - :obj:`float` - Storage size that reduced the residual load in the feeder. - - """ - step_size = 200 - sizes = [0] + list( - np.arange(p_storage_min, max_storage_size + 0.5 * step_size, step_size) - ) - p_feeder = edisgo.network.results.pfa_p.loc[:, repr(feeder)] - q_feeder = edisgo.network.results.pfa_q.loc[:, repr(feeder)] - p_slack = edisgo.network.pypsa.generators_t.p.loc[:, "Generator_slack"] * 1e3 - - # get sign of p and q - lines = edisgo.network.pypsa.lines.loc[repr(feeder), :] - mv_station_bus = ( - "bus0" - if lines.loc["bus0"] == f"Bus_{repr(edisgo.network.mv_grid.station)}" - else "bus1" - ) - if mv_station_bus == "bus0": - diff = ( - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - ) - else: - diff = ( - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - ) - p_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff], index=p_feeder.index) - q_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff_q], index=p_feeder.index) - - # get allowed load factors per case - lf = { - "feed-in_case": edisgo.network.config["grid_expansion_load_factors"][ - "mv_feed-in_case_line" - ], - "load_case": network.config["grid_expansion_load_factors"][ - "mv_load_case_line" - ], - } - - # calculate maximum apparent power for each storage size to find - # storage size that minimizes apparent power in the feeder - p_feeder = p_feeder.multiply(p_sign) - q_feeder = q_feeder.multiply(q_sign) - s_max = [] - for size in sizes: - share = size / storage_nominal_power - p_storage = storage_timeseries.p * share - q_storage = storage_timeseries.q * share - p_total = p_feeder + p_storage - q_total 
= q_feeder + q_storage - p_hv_mv_station = p_slack - p_storage - lf_ts = p_hv_mv_station.apply( - lambda _: lf["feed-in_case"] if _ < 0 else lf["load_case"] - ) - s_max_ts = (p_total**2 + q_total**2).apply(sqrt).divide(lf_ts) - s_max.append(max(s_max_ts)) - - return sizes[pd.Series(s_max).idxmin()] - - def _critical_nodes_feeder(edisgo, feeder): - """ - Returns all nodes in MV feeder with voltage issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :obj:`list` - List with all nodes in MV feeder with voltage issues. - - """ - # get all nodes with voltage issues in MV network - critical_nodes = check_tech_constraints.voltage_issues( - edisgo.network, voltage_levels="mv" - ) - if critical_nodes: - critical_nodes = critical_nodes[edisgo.network.mv_grid] - else: - return [] - - return [n for n in critical_nodes.index if repr(n.mv_feeder) == repr(feeder)] - - def _critical_lines_feeder(edisgo, feeder): - """ - Returns all lines in MV feeder with overload issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. 
- - """ - # return grid_expansion_costs_feeder_ranking[ - # (grid_expansion_costs_feeder_ranking.mv_feeder == feeder) & - # (grid_expansion_costs_feeder_ranking.voltage_level == 'mv')] - # get all overloaded MV lines - critical_lines = check_tech_constraints.mv_line_overload(edisgo.network) - # filter overloaded lines in feeder - critical_lines_feeder = [ - line - for line in critical_lines.index - if repr(tools.get_mv_feeder_from_line(line)) == repr(feeder) - ] - - return critical_lines.loc[critical_lines_feeder, :] - - def _estimate_new_number_of_lines(critical_lines_feeder): - return sum( - ( - ceil( - critical_lines_feeder.loc[crit_line, "max_rel_overload"] - * crit_line.quantity - ) - - crit_line.quantity - ) - for crit_line in critical_lines_feeder.index - ) - - raise NotImplementedError - - debug = kwargs.get("debug", False) - check_costs_reduction = kwargs.get("check_costs_reduction", False) - - # global variables - # minimum and maximum storage power to be connected to the MV network - p_storage_min = 300 - p_storage_max = 4500 - - # remaining storage nominal power - if storage_nominal_power is None: - storage_nominal_power = max(abs(storage_timeseries.p)) - p_storage_remaining = storage_nominal_power - - if debug: - feeder_repr = [] - storage_path = [] - storage_repr = [] - storage_size = [] - - # rank MV feeders by network expansion costs - - # conduct network reinforcement on copied edisgo object on worst-case time - # steps - grid_expansion_results_init = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis", mode="mv" - ) - - # only analyse storage integration if there were any network expansion needs - if grid_expansion_results_init.equipment_changes.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." 
- ) - return - else: - equipment_changes_reinforcement_init = ( - grid_expansion_results_init.equipment_changes.loc[ - grid_expansion_results_init.equipment_changes.iteration_step > 0 - ] - ) - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - if equipment_changes_reinforcement_init.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." - ) - return - else: - network = equipment_changes_reinforcement_init.index[0].grid.network - - # calculate network expansion costs without costs for new generators - # to be used in feeder ranking - grid_expansion_costs_feeder_ranking = costs.grid_expansion_costs( - network, without_generator_import=True, mode="mv" - ) - - ranked_feeders = _feeder_ranking(grid_expansion_costs_feeder_ranking) - - count = 1 - storage_obj_list = [] - total_grid_expansion_costs_new = "not calculated" - for feeder in ranked_feeders.values: - logger.debug("Feeder: {}".format(count)) - count += 1 - - # first step: find node where storage will be installed - - critical_nodes_feeder = _critical_nodes_feeder(edisgo, feeder) - critical_lines_feeder = _critical_lines_feeder(edisgo, feeder) - - # get node the storage will be connected to (in original graph) - battery_node = _find_battery_node( - edisgo, critical_lines_feeder, critical_nodes_feeder - ) - - if battery_node: - # add to output lists - if debug: - feeder_repr.append(repr(feeder)) - storage_path.append( - nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - battery_node, - ) - ) - - # second step: calculate storage size - - max_storage_size = min(p_storage_remaining, p_storage_max) - p_storage = _calc_storage_size(edisgo, feeder, max_storage_size) - - # if p_storage is greater than or equal to the minimum storage - # power required, do storage integration - if p_storage >= p_storage_min: - # third step: integrate storage - - share = p_storage / 
storage_nominal_power - edisgo.integrate_storage( - timeseries=storage_timeseries.p * share, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * share, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - # get new storage object - storage_obj = [ - _ - for _ in edisgo.network.mv_grid.graph.nodes_by_attribute("storage") - if _ in list(edisgo.network.mv_grid.graph.neighbors(battery_node)) - ][0] - storage_obj_list.append(storage_obj) - - logger.debug( - "Storage with nominal power of {} kW connected to " - "node {} (path to HV/MV station {}).".format( - p_storage, - battery_node, - nx.shortest_path( - battery_node.grid.graph, - battery_node.grid.station, - battery_node, - ), - ) - ) - - # fourth step: check if storage integration reduced network - # reinforcement costs or number of issues - - if check_costs_reduction == "each_feeder": - # calculate new network expansion costs - - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - # fmt: off - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum( - ) - ) - # fmt: on - - costs_diff = ( - total_grid_expansion_costs - total_grid_expansion_costs_new - ) - - if costs_diff > 0: - logger.debug( - "Storage integration in feeder {} reduced network " - "expansion costs by {} kEuro.".format(feeder, costs_diff) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - total_grid_expansion_costs = total_grid_expansion_costs_new - - else: - logger.debug( - "Storage integration in feeder {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(feeder, -costs_diff) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - 
position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - else: - number_parallel_lines_before = _estimate_new_number_of_lines( - critical_lines_feeder - ) - edisgo.analyze() - critical_lines_feeder_new = _critical_lines_feeder(edisgo, feeder) - critical_nodes_feeder_new = _critical_nodes_feeder(edisgo, feeder) - number_parallel_lines = _estimate_new_number_of_lines( - critical_lines_feeder_new - ) - - # if there are critical lines check if number of parallel - # lines was reduced - if not critical_lines_feeder.empty: - diff_lines = ( - number_parallel_lines_before - number_parallel_lines - ) - # if it was not reduced check if there are critical - # nodes and if the number was reduced - if diff_lines <= 0: - # if there are no critical nodes remove storage - if not critical_nodes_feeder: - logger.debug( - "Storage integration in feeder {} did not " - "reduce number of critical lines (number " - "increased by {}), storage " - "is therefore removed.".format(feeder, -diff_lines) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q - * 0, - ) - tools.assign_mv_feeder_to_nodes( - edisgo.network.mv_grid - ) - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. 
{}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - else: - logger.debug( - "Storage integration in feeder {} reduced " - "number of critical lines.".format(feeder) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # if there are no critical lines - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. {}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # fifth step: if there is storage capacity left, rerun - # the past steps for the next feeder in the ranking - # list - p_storage_remaining = p_storage_remaining - p_storage - if not p_storage_remaining > p_storage_min: - break - - else: - logger.debug("No storage integration in feeder {}.".format(feeder)) - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - else: - logger.debug( - "No storage integration in feeder {} because there " - "are neither overloading nor voltage issues.".format(feeder) - ) - - if debug: - storage_repr.append(None) - storage_size.append(0) - feeder_repr.append(repr(feeder)) - storage_path.append([]) - - if check_costs_reduction == "once": - # check costs reduction and discard all storages if costs were not - # reduced - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum() - ) - - costs_diff = 
total_grid_expansion_costs - total_grid_expansion_costs_new - - if costs_diff > 0: - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - else: - logger.info( - "Storage integration in network {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(edisgo.network.id, -costs_diff) - ) - - for storage in storage_obj_list: - tools.disconnect_storage(edisgo.network, storage) - elif check_costs_reduction == "each_feeder": - # if costs redcution was checked after each storage only give out - # total costs reduction - if total_grid_expansion_costs_new == "not calculated": - costs_diff = 0 - else: - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - costs_diff = total_grid_expansion_costs - total_grid_expansion_costs_new - - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - - if debug: - plots.storage_size( - edisgo.network.mv_grid, - edisgo.network.pypsa, - filename="storage_results_{}.pdf".format(edisgo.network.id), - lopf=False, - ) - storages_df = pd.DataFrame( - { - "path": storage_path, - "repr": storage_repr, - "p_nom": storage_size, - }, - index=feeder_repr, - ) - storages_df.to_csv("storage_results_{}.csv".format(edisgo.network.id)) - - edisgo.network.results.storages_costs_reduction = pd.DataFrame( - { - "grid_expansion_costs_initial": total_grid_expansion_costs, - "grid_expansion_costs_with_storages": total_grid_expansion_costs_new, - }, - index=[edisgo.network.id], - ) diff --git a/edisgo/io/dsm_import.py b/edisgo/io/dsm_import.py index fac856d9f..648ae5039 100644 --- a/edisgo/io/dsm_import.py +++ b/edisgo/io/dsm_import.py @@ -28,7 +28,7 @@ def oedb( ): """ Gets industrial and CTS DSM profiles from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. 
Profiles comprise minimum and maximum load increase in MW as well as maximum energy pre- and postponing in MWh. diff --git a/edisgo/io/generators_import.py b/edisgo/io/generators_import.py index e9628c6d9..281e78a04 100755 --- a/edisgo/io/generators_import.py +++ b/edisgo/io/generators_import.py @@ -42,9 +42,9 @@ def oedb_legacy(edisgo_object, generator_scenario, **kwargs): The importer uses SQLAlchemy ORM objects. These are defined in `ego.io `_. The data is imported from the tables - `conventional power plants `_ and - `renewable power plants `_. When the generator data is retrieved, the following steps are conducted: diff --git a/edisgo/io/heat_pump_import.py b/edisgo/io/heat_pump_import.py index bb80064b7..69133ca11 100644 --- a/edisgo/io/heat_pump_import.py +++ b/edisgo/io/heat_pump_import.py @@ -583,7 +583,7 @@ def _grid_integration( def efficiency_resistive_heaters_oedb(scenario, engine): """ Get efficiency of resistive heaters from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. 
Parameters ---------- diff --git a/edisgo/io/powermodels_io.py b/edisgo/io/powermodels_io.py index 16449bd17..b0fb22781 100644 --- a/edisgo/io/powermodels_io.py +++ b/edisgo/io/powermodels_io.py @@ -957,10 +957,16 @@ def _build_load( ) pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") p_d = -min( - [psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], 0] + [ + psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], + np.float64(0.0), + ] ) q_d = -max( - [psa_net.storage_units_t.q_set[inflexible_storage_units[stor_i]][0], 0] + [ + psa_net.storage_units_t.q_set[inflexible_storage_units[stor_i]][0], + np.float64(0.0), + ] ) pm["load"][str(stor_i + len(loads_df.index) + 1)] = { "pd": p_d.round(20) / s_base, diff --git a/edisgo/io/timeseries_import.py b/edisgo/io/timeseries_import.py index 99204b1c2..5d154b965 100644 --- a/edisgo/io/timeseries_import.py +++ b/edisgo/io/timeseries_import.py @@ -74,7 +74,7 @@ def _timeindex_helper_func( def feedin_oedb_legacy(edisgo_object, timeindex=None): """ Import feed-in time series data for wind and solar power plants from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- @@ -158,7 +158,7 @@ def feedin_oedb( ): """ Import feed-in time series data for wind and solar power plants from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- @@ -319,7 +319,7 @@ def load_time_series_demandlib(edisgo_obj, timeindex=None): def cop_oedb(edisgo_object, engine, weather_cell_ids, timeindex=None): """ Get COP (coefficient of performance) time series data from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- @@ -377,7 +377,7 @@ def cop_oedb(edisgo_object, engine, weather_cell_ids, timeindex=None): def heat_demand_oedb(edisgo_obj, scenario, engine, timeindex=None): """ Get heat demand profiles for heat pumps from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Heat demand data is returned for all heat pumps in the grid. 
For more information on how individual heat demand profiles are obtained see @@ -498,7 +498,7 @@ def electricity_demand_oedb( ): """ Get electricity demand profiles for all conventional loads from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Conventional loads comprise conventional electricity applications in the residential, CTS and industrial sector. @@ -1191,7 +1191,7 @@ def get_residential_electricity_profiles_per_building(building_ids, scenario, en List of building IDs to retrieve electricity demand profiles for. scenario : str Scenario for which to retrieve demand data. Possible options - are 'eGon2035' and 'eGon100RE'. + are 'eGon2021', 'eGon2035' and 'eGon100RE'. engine : :sqlalchemy:`sqlalchemy.Engine` Database engine. @@ -1220,30 +1220,21 @@ def _get_scaling_factors_of_zensus_cells(zensus_ids): column factor. """ - with session_scope_egon_data(engine) as session: - if scenario == "eGon2035": - query = session.query( - egon_household_electricity_profile_in_census_cell.cell_id, - egon_household_electricity_profile_in_census_cell.factor_2035.label( - "factor" - ), - ).filter( - egon_household_electricity_profile_in_census_cell.cell_id.in_( - zensus_ids - ) - ) - else: - query = session.query( - egon_household_electricity_profile_in_census_cell.cell_id, - egon_household_electricity_profile_in_census_cell.factor_2050.label( - "factor" - ), - ).filter( - egon_household_electricity_profile_in_census_cell.cell_id.in_( - zensus_ids - ) - ) - return pd.read_sql(query.statement, engine, index_col="cell_id") + if scenario == "eGon2021": + return pd.DataFrame(index=zensus_ids, data={"factor": 1.0}) + else: + with session_scope_egon_data(engine) as session: + if scenario == "eGon2035": + query = session.query( + hh_profile.cell_id, + hh_profile.factor_2035.label("factor"), + ).filter(hh_profile.cell_id.in_(zensus_ids)) + else: + query = session.query( + hh_profile.cell_id, + hh_profile.factor_2050.label("factor"), + 
).filter(hh_profile.cell_id.in_(zensus_ids)) + return pd.read_sql(query.statement, engine, index_col="cell_id") def _get_profile_ids_of_buildings(building_ids): """ @@ -1302,7 +1293,9 @@ def _get_profiles(profile_ids): saio.register_schema("demand", engine) from saio.demand import ( - egon_household_electricity_profile_in_census_cell, + egon_household_electricity_profile_in_census_cell as hh_profile, + ) + from saio.demand import ( egon_household_electricity_profile_of_buildings, iee_household_load_profiles, ) @@ -1350,7 +1343,7 @@ def get_industrial_electricity_profiles_per_site(site_ids, scenario, engine): List of industrial site and OSM IDs to retrieve electricity demand profiles for. scenario : str Scenario for which to retrieve demand data. Possible options - are 'eGon2035' and 'eGon100RE'. + are 'eGon2021', 'eGon2035' and 'eGon100RE'. engine : :sqlalchemy:`sqlalchemy.Engine` Database engine. diff --git a/edisgo/network/heat.py b/edisgo/network/heat.py index 22403d0f2..1762f56c7 100644 --- a/edisgo/network/heat.py +++ b/edisgo/network/heat.py @@ -131,7 +131,7 @@ def set_cop(self, edisgo_object, ts_cop, **kwargs): Write COP time series for heat pumps to py:attr:`~cop_df`. COP time series can either be given to this function or be obtained from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. In case they are obtained from the OpenEnergy DataBase the heat pumps need to already be integrated into the grid, i.e. given in :attr:`~.network.topology.Topology.loads_df`. @@ -150,7 +150,7 @@ def set_cop(self, edisgo_object, ts_cop, **kwargs): * 'oedb' COP / efficiency data are obtained from the `OpenEnergy DataBase - `_. + `_. In case of heat pumps weather cell specific hourly COP time series are obtained (see :func:`edisgo.io.timeseries_import.cop_oedb` for more information). 
Using information on which weather cell each heat pump @@ -317,7 +317,7 @@ def set_heat_demand(self, edisgo_object, ts_heat_demand, **kwargs): Write heat demand time series of heat pumps to py:attr:`~heat_demand_df`. Heat demand time series can either be given to this function or be obtained from - the `OpenEnergy DataBase `_. + the `OpenEnergy DataBase `_. In case they are obtained from the OpenEnergy DataBase the heat pumps need to already be integrated into the grid, i.e. given in :attr:`~.network.topology.Topology.loads_df`. @@ -336,7 +336,7 @@ def set_heat_demand(self, edisgo_object, ts_heat_demand, **kwargs): * 'oedb' Heat demand time series are obtained from the `OpenEnergy DataBase - `_ (see + `_ (see :func:`edisgo.io.timeseries_import.heat_demand_oedb` for more information). Time series are only obtained for heat pumps that are already integrated diff --git a/edisgo/network/results.py b/edisgo/network/results.py index 8f6e8b044..151fbd480 100755 --- a/edisgo/network/results.py +++ b/edisgo/network/results.py @@ -628,7 +628,7 @@ def _add_line_to_equipment_changes(self, line): "iteration_step": [0], "change": ["added"], "equipment": [line.type_info], - "quantity": [1], + "quantity": [line.num_parallel], }, index=[line.name], ), diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index 8da353ff2..15337f06b 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -1204,7 +1204,7 @@ def predefined_fluctuating_generators_by_technology( Technology and weather cell specific hourly feed-in time series are obtained from the `OpenEnergy DataBase - `_. See + `_. See :func:`edisgo.io.timeseries_import.feedin_oedb` for more information. 
This option requires that the parameter `engine` is provided in case @@ -1448,6 +1448,12 @@ def predefined_conventional_loads_by_sector( load_names = self._check_if_components_exist(edisgo_object, load_names, "loads") loads_df = edisgo_object.topology.loads_df.loc[load_names, :] + # check if loads contain annual demand + if not all(loads_df.annual_consumption.notnull()): + raise AttributeError( + "The annual consumption of some loads is missing. Please provide" + ) + # scale time series by annual consumption ts_scaled = loads_df.apply( lambda x: ts_loads[x.sector] * x.annual_consumption, diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index f411bf312..2462314f0 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -2123,7 +2123,7 @@ def _choose_random_substation_id(): """ if comp_type == "generator": - random.seed(a=comp_data["generator_id"]) + random.seed(a=int(comp_data["generator_id"])) elif comp_type == "storage_unit": random.seed(a=len(self.storage_units_df)) else: diff --git a/edisgo/tools/logger.py b/edisgo/tools/logger.py index 5b1a62a55..20514f947 100644 --- a/edisgo/tools/logger.py +++ b/edisgo/tools/logger.py @@ -119,7 +119,7 @@ def setup_logger( def create_dir(dir_path): if not os.path.isdir(dir_path): - os.mkdir(dir_path) + os.makedirs(dir_path) def get_default_root_dir(): dir_path = str(cfg_edisgo.get("user_dirs", "root_dir")) @@ -140,9 +140,9 @@ def create_home_dir(): log_dir = os.path.join( get_default_root_dir(), cfg_edisgo.get("user_dirs", "log_dir") ) - create_dir(log_dir) if log_dir is not None: + create_dir(log_dir) file_name = os.path.join(log_dir, file_name) if reset_loggers: diff --git a/edisgo/tools/plots.py b/edisgo/tools/plots.py index 381c73754..3a1207788 100644 --- a/edisgo/tools/plots.py +++ b/edisgo/tools/plots.py @@ -6,7 +6,6 @@ from typing import TYPE_CHECKING import matplotlib -import matplotlib.cm as cm import numpy as np import pandas as pd import plotly.graph_objects as go @@ -604,7 
+603,7 @@ def nodes_by_costs(buses, grid_expansion_costs, edisgo_obj): bus_sizes, bus_colors = nodes_by_costs( pypsa_plot.buses.index, grid_expansion_costs, edisgo_obj ) - bus_cmap = plt.cm.get_cmap(lines_cmap) + bus_cmap = matplotlib.pyplot.colormaps.get_cmap(lines_cmap) elif node_color == "curtailment": bus_sizes = nodes_curtailment(pypsa_plot.buses.index, curtailment_df) bus_colors = "orangered" @@ -681,7 +680,8 @@ def nodes_by_costs(buses, grid_expansion_costs, edisgo_obj): line_width = pypsa_plot.lines.s_nom * scaling_factor_line_width else: line_width = 2 - cmap = plt.cm.get_cmap(lines_cmap) + cmap = matplotlib.pyplot.colormaps.get_cmap(lines_cmap) + ll = pypsa_plot.plot( line_colors=line_colors, line_cmap=cmap, @@ -888,7 +888,7 @@ def color_map_color( """ norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax) if isinstance(cmap_name, str): - cmap = cm.get_cmap(cmap_name) + cmap = matplotlib.pyplot.colormaps.get_cmap(cmap_name) else: cmap = matplotlib.colors.LinearSegmentedColormap.from_list("mycmap", cmap_name) rgb = cmap(norm(abs(value)))[:3] diff --git a/examples/electromobility_example.ipynb b/examples/electromobility_example.ipynb index 5bc23ea1d..bfc97335f 100644 --- a/examples/electromobility_example.ipynb +++ b/examples/electromobility_example.ipynb @@ -430,8 +430,7 @@ " url = (f\"https://github.com/openego/eDisGo/tree/dev/\" +\n", " f\"tests/data/simbev_example_scenario/{ags}/\")\n", " page = requests.get(url).text\n", - " items = json.loads(page)[\"payload\"][\"tree\"][\"items\"]\n", - " filenames = [f[\"name\"] for f in items if \"csv\" in f[\"name\"]]\n", + " filenames = [_ for _ in page.split('\"') if \".csv\" in _ and \"/\" not in _]\n", "\n", " for file in filenames:\n", " req = requests.get(f\"{raw_url}/{ags}/{file}\")\n", @@ -470,8 +469,7 @@ " url = (\"https://github.com/openego/eDisGo/tree/dev/\" +\n", " \"tests/data/tracbev_example_scenario/\")\n", " page = requests.get(url).text\n", - " items = 
json.loads(page)[\"payload\"][\"tree\"][\"items\"]\n", - " filenames = [f[\"name\"] for f in items if \"gpkg\" in f[\"name\"]]\n", + " filenames = [_ for _ in page.split('\"') if \".gpkg\" in _ and \"/\" not in _]\n", "\n", " for file in filenames:\n", " req = requests.get(\n", diff --git a/rtd_requirements.txt b/rtd_requirements.txt index dd3c393d5..3900ea862 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -1,5 +1,5 @@ dash < 2.9.0 -demandlib +demandlib < 0.2.0 egoio >= 0.4.7 geopy >= 2.0.0 jupyter_dash @@ -8,13 +8,12 @@ multiprocess networkx >= 2.5.0 pandas >= 1.4.0 plotly -pyomo >= 6.0 pypower pyproj >= 3.0.0 -pypsa >=0.17.0, <=0.20.1 +pypsa == 0.26.2 pyyaml saio -scikit-learn +scikit-learn < 1.3.0 sphinx sphinx_rtd_theme >=0.5.2 sphinx-autodoc-typehints diff --git a/setup.py b/setup.py index 4f570f745..f04a4b97c 100644 --- a/setup.py +++ b/setup.py @@ -4,9 +4,9 @@ from setuptools import find_packages, setup -if sys.version_info[:2] < (3, 8): +if sys.version_info[:2] < (3, 9): error = ( - "eDisGo requires Python 3.8 or later (%d.%d detected)." % sys.version_info[:2] + "eDisGo requires Python 3.9 or later (%d.%d detected)." 
% sys.version_info[:2] ) sys.stderr.write(error + "\n") sys.exit(1) @@ -33,7 +33,7 @@ def read(fname): requirements = [ "contextily", "dash < 2.9.0", - "demandlib", + "demandlib < 0.2.0", "descartes", "egoio >= 0.4.7", "geoalchemy2 < 0.7.0", @@ -50,13 +50,12 @@ def read(fname): "plotly", "pydot", "pygeos", - "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release "pypower", "pyproj >= 3.0.0", - "pypsa >= 0.17.0, <= 0.20.1", + "pypsa == 0.26.2", "pyyaml", "saio", - "scikit-learn <= 1.1.1", + "scikit-learn < 1.3.0", "shapely >= 1.7.0", "sqlalchemy < 1.4.0", "sshtunnel", diff --git a/tests/conftest.py b/tests/conftest.py index 6809eb81a..998adaebb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -57,6 +57,9 @@ def pytest_addoption(parser): default=False, help="run tests that only work locally", ) + parser.addoption( + "--runoedbtest", action="store_true", default=False, help="Run OEDB tests" + ) def pytest_collection_modifyitems(config, items): @@ -75,3 +78,8 @@ def pytest_collection_modifyitems(config, items): for item in items: if "runonlinux" in item.keywords: item.add_marker(skip_windows) + if not config.getoption("--runoedbtest"): + skip_oedbtest = pytest.mark.skip(reason="need --runoedbtest option to run") + for item in items: + if "oedbtest" in item.keywords: + item.add_marker(skip_oedbtest) diff --git a/tests/flex_opt/test_costs.py b/tests/flex_opt/test_costs.py index 813714aa8..308f9e7e6 100644 --- a/tests/flex_opt/test_costs.py +++ b/tests/flex_opt/test_costs.py @@ -3,7 +3,7 @@ import pytest from edisgo import EDisGo -from edisgo.flex_opt.costs import grid_expansion_costs, line_expansion_costs +from edisgo.flex_opt import costs as costs_mod class TestCosts: @@ -76,12 +76,12 @@ def test_costs(self): ], ) - costs = grid_expansion_costs(self.edisgo) + costs = costs_mod.grid_expansion_costs(self.edisgo) assert len(costs) == 4 assert ( costs.loc["MVStation_1_transformer_reinforced_2", "voltage_level"] - == "mv/lv" + == "hv/mv" ) assert 
costs.loc["MVStation_1_transformer_reinforced_2", "quantity"] == 1 assert costs.loc["MVStation_1_transformer_reinforced_2", "total_costs"] == 1000 @@ -97,13 +97,13 @@ def test_costs(self): assert costs.loc["Line_10019", "type"] == "48-AL1/8-ST1A" assert costs.loc["Line_10019", "voltage_level"] == "mv" assert np.isclose(costs.loc["Line_50000002", "total_costs"], 2.34) - assert np.isclose(costs.loc["Line_50000002", "length"], 0.09) - assert costs.loc["Line_50000002", "quantity"] == 3 + assert np.isclose(costs.loc["Line_50000002", "length"], 0.03) + assert costs.loc["Line_50000002", "quantity"] == 1 assert costs.loc["Line_50000002", "type"] == "NAYY 4x1x35" assert costs.loc["Line_50000002", "voltage_level"] == "lv" def test_line_expansion_costs(self): - costs = line_expansion_costs(self.edisgo) + costs = costs_mod.line_expansion_costs(self.edisgo) assert len(costs) == len(self.edisgo.topology.lines_df) assert (costs.index == self.edisgo.topology.lines_df.index).all() assert len(costs[costs.voltage_level == "mv"]) == len( @@ -116,7 +116,9 @@ def test_line_expansion_costs(self): assert np.isclose(costs.at["Line_10000015", "costs_cable"], 0.27) assert costs.at["Line_10000015", "voltage_level"] == "lv" - costs = line_expansion_costs(self.edisgo, ["Line_10003", "Line_10000015"]) + costs = costs_mod.line_expansion_costs( + self.edisgo, ["Line_10003", "Line_10000015"] + ) assert len(costs) == 2 assert (costs.index.values == ["Line_10003", "Line_10000015"]).all() assert np.isclose(costs.at["Line_10003", "costs_earthworks"], 0.083904 * 60) @@ -125,3 +127,28 @@ def test_line_expansion_costs(self): assert np.isclose(costs.at["Line_10000015", "costs_earthworks"], 1.53) assert np.isclose(costs.at["Line_10000015", "costs_cable"], 0.27) assert costs.at["Line_10000015", "voltage_level"] == "lv" + + def test_transformer_expansion_costs(self): + costs = costs_mod.transformer_expansion_costs(self.edisgo) + transformers_df = pd.concat( + [ + self.edisgo.topology.transformers_df, + 
self.edisgo.topology.transformers_hvmv_df, + ] + ) + assert len(costs) == len(transformers_df) + assert sorted(costs.index) == sorted(transformers_df.index) + assert len(costs[costs.voltage_level == "hv/mv"]) == len( + self.edisgo.topology.transformers_hvmv_df + ) + assert np.isclose(costs.at["MVStation_1_transformer_1", "costs"], 1000) + assert costs.at["MVStation_1_transformer_1", "voltage_level"] == "hv/mv" + assert np.isclose(costs.at["LVStation_4_transformer_2", "costs"], 10) + assert costs.at["LVStation_4_transformer_2", "voltage_level"] == "mv/lv" + + costs = costs_mod.transformer_expansion_costs( + self.edisgo, ["LVStation_5_transformer_1"] + ) + assert len(costs) == 1 + assert np.isclose(costs.at["LVStation_5_transformer_1", "costs"], 10) + assert costs.at["LVStation_5_transformer_1", "voltage_level"] == "mv/lv" diff --git a/tests/flex_opt/test_reinforce_grid.py b/tests/flex_opt/test_reinforce_grid.py index cb6074310..e073b2326 100644 --- a/tests/flex_opt/test_reinforce_grid.py +++ b/tests/flex_opt/test_reinforce_grid.py @@ -81,7 +81,7 @@ def test_run_separate_lv_grids(self): assert len(g.buses_df) > 1 assert len(lv_grids_new) == 26 - assert np.isclose(edisgo.results.grid_expansion_costs.total_costs.sum(), 280.06) + assert np.isclose(edisgo.results.grid_expansion_costs.total_costs.sum(), 440.06) # check if all generators are still present assert np.isclose( diff --git a/tests/io/test_generators_import.py b/tests/io/test_generators_import.py index 9adfdde1e..51ee4b5ee 100644 --- a/tests/io/test_generators_import.py +++ b/tests/io/test_generators_import.py @@ -484,6 +484,7 @@ class TestGeneratorsImportOEDB: """ @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_without_timeseries(self): edisgo = EDisGo( ding0_grid=pytest.ding0_test_network_2_path, @@ -497,6 +498,7 @@ def test_oedb_legacy_without_timeseries(self): assert np.isclose(edisgo.topology.generators_df.p_nom.sum(), 20.18783) @pytest.mark.slow + @pytest.mark.oedbtest def 
test_oedb_legacy_with_worst_case_timeseries(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_2_path) edisgo.set_time_series_worst_case_analysis() @@ -568,6 +570,7 @@ def test_oedb_legacy_with_worst_case_timeseries(self): # :, new_solar_gen.name] / new_solar_gen.p_nom).all() @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_with_timeseries_by_technology(self): timeindex = pd.date_range("1/1/2012", periods=3, freq="H") ts_gen_dispatchable = pd.DataFrame( @@ -647,6 +650,7 @@ def test_oedb_legacy_with_timeseries_by_technology(self): # :, new_solar_gen.name] / new_solar_gen.p_nom).all() @pytest.mark.slow + @pytest.mark.oedbtest def test_target_capacity(self): edisgo = EDisGo( ding0_grid=pytest.ding0_test_network_2_path, diff --git a/tests/io/test_timeseries_import.py b/tests/io/test_timeseries_import.py index 6c88f54b4..17340163b 100644 --- a/tests/io/test_timeseries_import.py +++ b/tests/io/test_timeseries_import.py @@ -59,6 +59,7 @@ def test__timeindex_helper_func(self): assert_index_equal(ind, given_index) assert_index_equal(ind_full, timeindex) + @pytest.mark.oedbtest def test_feedin_oedb_legacy(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) timeindex = pd.date_range("1/1/2010", periods=3000, freq="H") @@ -268,6 +269,13 @@ def test_get_residential_electricity_profiles_per_building(self): assert df.shape == (8760, 1) assert np.isclose(df.loc[:, 442081].sum(), 3.20688, atol=1e-3) + # test with status quo + df = timeseries_import.get_residential_electricity_profiles_per_building( + [-1, 442081], "eGon2021", pytest.engine + ) + assert df.shape == (8760, 1) + assert np.isclose(df.loc[:, 442081].sum(), 4.288845, atol=1e-3) + @pytest.mark.local def test_get_industrial_electricity_profiles_per_site(self): # test with one site and one OSM area @@ -283,3 +291,11 @@ def test_get_industrial_electricity_profiles_per_site(self): [541658], "eGon2035", pytest.engine ) assert df.shape == (8760, 1) + + # test with status quo + df = 
timeseries_import.get_industrial_electricity_profiles_per_site( + [1, 541658], "eGon2021", pytest.engine + ) + assert df.shape == (8760, 2) + assert np.isclose(df.loc[:, 1].sum(), 31655.640, atol=1e-3) + assert np.isclose(df.loc[:, 541658].sum(), 2910.816, atol=1e-3) diff --git a/tests/network/test_timeseries.py b/tests/network/test_timeseries.py index 2c05eda49..4666a836a 100644 --- a/tests/network/test_timeseries.py +++ b/tests/network/test_timeseries.py @@ -1235,6 +1235,7 @@ def test_worst_case_storage_units(self): ) @pytest.mark.slow + @pytest.mark.oedbtest def test_predefined_fluctuating_generators_by_technology(self): timeindex = pd.date_range("1/1/2011 12:00", periods=2, freq="H") self.edisgo.timeseries.timeindex = timeindex @@ -1793,6 +1794,26 @@ def test_predefined_conventional_loads_by_sector(self, caplog): ).values, ).all() + # test Error if 'annual_consumption' is missing + # Save the original 'annual_consumption' values + original_annual_consumption = self.edisgo.topology.loads_df[ + "annual_consumption" + ].copy() + # Set 'annual_consumption' to None for the test + self.edisgo.topology.loads_df["annual_consumption"] = None + with pytest.raises(AttributeError) as exc_info: + self.edisgo.timeseries.predefined_conventional_loads_by_sector( + self.edisgo, "demandlib" + ) + assert ( + exc_info.value.args[0] + == "The annual consumption of some loads is missing. 
Please provide" + ) + # Restore the original 'annual_consumption' values + self.edisgo.topology.loads_df[ + "annual_consumption" + ] = original_annual_consumption + def test_predefined_charging_points_by_use_case(self, caplog): index = pd.date_range("1/1/2018", periods=3, freq="H") self.edisgo.timeseries.timeindex = index diff --git a/tests/test_edisgo.py b/tests/test_edisgo.py index 1f09e62d9..dbfa03a33 100755 --- a/tests/test_edisgo.py +++ b/tests/test_edisgo.py @@ -382,6 +382,7 @@ def test_to_graph(self): ) @pytest.mark.slow + @pytest.mark.oedbtest def test_generator_import(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_2_path) edisgo.import_generators("nep2035") @@ -521,7 +522,7 @@ def test_reinforce_catch_convergence(self): ) results = self.edisgo.reinforce(catch_convergence_problems=True) assert results.unresolved_issues.empty - assert len(results.grid_expansion_costs) == 132 + assert len(results.grid_expansion_costs) == 134 assert len(results.equipment_changes) == 218 assert results.v_res.shape == (4, 142) @@ -543,7 +544,7 @@ def test_enhanced_reinforce_grid(self): results = edisgo_obj.results - assert len(results.grid_expansion_costs) == 445 + assert len(results.grid_expansion_costs) == 454 assert len(results.equipment_changes) == 892 assert results.v_res.shape == (4, 148) diff --git a/tests/test_examples.py b/tests/test_examples.py index 8e31d88d9..9f0a862eb 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -26,6 +26,7 @@ def test_plot_example_ipynb(self): assert result.exec_error is None @pytest.mark.slow + @pytest.mark.oedbtest def test_electromobility_example_ipynb(self): path = os.path.join(self.examples_dir_path, "electromobility_example.ipynb") notebook = pytest_notebook.notebook.load_notebook(path=path) @@ -39,6 +40,7 @@ def test_electromobility_example_ipynb(self): assert result.exec_error is None @pytest.mark.slow + @pytest.mark.oedbtest def test_edisgo_simple_example_ipynb(self): path = 
os.path.join(self.examples_dir_path, "edisgo_simple_example.ipynb") notebook = pytest_notebook.notebook.load_notebook(path=path) diff --git a/tests/tools/test_logger.py b/tests/tools/test_logger.py index 80e372eef..eee071df6 100644 --- a/tests/tools/test_logger.py +++ b/tests/tools/test_logger.py @@ -1,65 +1,85 @@ import logging import os +import pytest + from edisgo.tools.logger import setup_logger +def check_file_output(filename, output): + with open(filename) as file: + last_line = file.readlines()[-1].split(" ")[3:] + last_line = " ".join(last_line) + assert last_line == output + + +def reset_loggers(filename): + logger = logging.getLogger("edisgo") + logger.handlers.clear() + logger.propagate = True + # try removing file - when run on github for Windows removing the file leads + # to a PermissionError + try: + os.remove(filename) + except PermissionError: + pass + + class TestClass: def test_setup_logger(self): - def check_file_output(output): - with open("edisgo.log") as file: - last_line = file.readlines()[-1].split(" ")[3:] - last_line = " ".join(last_line) - assert last_line == output - - def reset_loggers(): - logger = logging.getLogger("edisgo") - logger.propagate = True - logger.handlers.clear() - logger = logging.getLogger() - logger.handlers.clear() - - if os.path.exists("edisgo.log"): - os.remove("edisgo.log") + filename = os.path.join( + os.path.expanduser("~"), ".edisgo", "log", "test_log.log" + ) + if os.path.exists(filename): + os.remove(filename) setup_logger( loggers=[ {"name": "root", "file_level": "debug", "stream_level": "debug"}, {"name": "edisgo", "file_level": "debug", "stream_level": "debug"}, ], - file_name="edisgo.log", + file_name="test_log.log", + log_dir="default", ) logger = logging.getLogger("edisgo") # Test that edisgo logger writes to file. logger.debug("root") - check_file_output("edisgo - DEBUG: root\n") + check_file_output(filename, "edisgo - DEBUG: root\n") # Test that root logger writes to file. 
logging.debug("root") - check_file_output("root - DEBUG: root\n") + check_file_output(filename, "root - DEBUG: root\n") + + reset_loggers(filename) - # reset_loggers() + @pytest.mark.runonlinux + def test_setup_logger_2(self): + """ + This test is only run on linux, as the log file is written to the user + home directory, which is not allowed when tests are run on github. + + """ + + # delete any existing log files + log_files = [_ for _ in os.listdir(os.getcwd()) if ".log" in _] + for log_file in log_files: + os.remove(log_file) setup_logger( loggers=[ {"name": "edisgo", "file_level": "debug", "stream_level": "debug"}, ], - file_name="edisgo.log", reset_loggers=True, debug_message=True, ) logger = logging.getLogger("edisgo") + + filename = [_ for _ in os.listdir(os.getcwd()) if ".log" in _][0] # Test that edisgo logger writes to file. logger.debug("edisgo") - check_file_output("edisgo - DEBUG: edisgo\n") - # Test that root logger doesn't writes to file. - logging.debug("edisgo") - check_file_output("edisgo - DEBUG: edisgo\n") - - @classmethod - def teardown_class(cls): - logger = logging.getLogger("edisgo") - logger.handlers.clear() - logger.propagate = True + check_file_output(filename, "edisgo - DEBUG: edisgo\n") + # Test that root logger doesn't write to file. + logging.debug("root") + check_file_output(filename, "edisgo - DEBUG: edisgo\n") - os.remove("edisgo.log") + reset_loggers(filename) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index 606c60d81..40c34a63b 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -168,6 +168,7 @@ def test_determine_bus_voltage_level(self): assert tools.determine_bus_voltage_level(self.edisgo, bus_voltage_level_6) == 6 assert tools.determine_bus_voltage_level(self.edisgo, bus_voltage_level_7) == 7 + @pytest.mark.oedbtest def test_get_weather_cells_intersecting_with_grid_district(self): weather_cells = tools.get_weather_cells_intersecting_with_grid_district( self.edisgo