Merge pull request #1175 from GbotemiB/revise-export
Revise export ports data
davide-f authored Nov 28, 2024
2 parents c9f30e1 + 5796bcf commit e6e3347
Showing 7 changed files with 53 additions and 2 deletions.
5 changes: 4 additions & 1 deletion Snakefile
@@ -1001,8 +1001,11 @@ rule solve_sector_networks:


rule prepare_ports:
    params:
        custom_export=config["custom_data"]["export_ports"],
    output:
        ports="data/ports.csv",  # TODO move from data to resources
        export_ports="resources/" + SECDIR + "export_ports.csv",
    script:
        "scripts/prepare_ports.py"

@@ -1156,7 +1159,7 @@ rule add_export:
        costs=config["costs"],
    input:
        overrides="data/override_component_attrs",
        export_ports="data/export_ports.csv",
        export_ports="resources/" + SECDIR + "export_ports.csv",
        costs=COSTDIR + "costs_{planning_horizons}.csv",
        ship_profile="resources/" + SECDIR + "ship_profile_{h2export}TWh.csv",
        network=RESDIR
1 change: 1 addition & 0 deletions config.default.yaml
@@ -478,6 +478,7 @@ custom_data:
add_existing: false
custom_sectors: false
gas_network: false  # If "True", a custom .csv file must be placed in "resources/custom_data/pipelines.csv". If "False", the user can choose between "greenfield" or model built-in datasets. Please refer to ["sector"] below.
export_ports: false # If "True" then a custom .csv file must be placed in "data/custom/export_ports.csv"

industry:
reference_year: 2015
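With custom_data: export_ports set to true, prepare_ports expects a user-supplied file at data/custom/export_ports.csv and moves it into the sector resources directory for add_export to read. A minimal sketch of how such a file could be assembled is shown below; the column set (name, country, fraction, x, y) is an assumption drawn from the fields referenced in add_export.py, not a documented schema, and the port entries are purely illustrative.

# Hypothetical custom export-ports file for the export_ports custom_data option.
# Column names are assumptions based on the fields referenced in add_export.py;
# "fraction" is taken here as the port's share of its country's export demand.
import pandas as pd

custom_ports = pd.DataFrame(
    [
        {"name": "Port A", "country": "MA", "fraction": 0.7, "x": -7.6, "y": 33.6},
        {"name": "Port B", "country": "MA", "fraction": 0.3, "x": -5.8, "y": 35.8},
    ]
)

# prepare_ports.py moves this file to resources/<sector dir>/export_ports.csv
custom_ports.to_csv("data/custom/export_ports.csv", index=False)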
File renamed without changes.
2 changes: 2 additions & 0 deletions doc/release_notes.rst
@@ -59,6 +59,8 @@ E.g. if a new rule becomes available describe how to use it `make test` and in o

* Adds CI to keep pinned environment files up to date. `PR #1183 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1183>`__ and `PR #1210 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1210>`__

* Revise the ports data used for export in `add_export.py` in the sector model. `PR #1175 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1175>`__

PyPSA-Earth 0.4.1
=================

6 changes: 5 additions & 1 deletion scripts/add_export.py
@@ -39,6 +39,9 @@ def select_ports(n):
        keep_default_na=False,
    ).squeeze()

    # ports = raw_ports[["name", "country", "fraction", "x", "y"]]
    # ports.loc[:, "fraction"] = ports.fraction.round(1)

    ports = ports[ports.country.isin(countries)]
    if len(ports) < 1:
        logger.error(
@@ -147,7 +150,8 @@ def create_export_profile():
    and resamples it to the temporal resolution obtained from the wildcard.
    """

    export_h2 = eval(snakemake.wildcards["h2export"]) * 1e6  # convert TWh to MWh
    # convert TWh to MWh
    export_h2 = eval(snakemake.wildcards["h2export"]) * 1e6

    if snakemake.params.export_profile == "constant":
        export_profile = export_h2 / 8760
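As a quick sanity check on the relocated comment, create_export_profile converts the {h2export} wildcard from TWh to MWh and, for the "constant" profile, spreads it evenly over the 8760 hours of a year. The figures below are illustrative only and are not part of the project code.

# Standalone sketch of the TWh-to-MWh conversion and the "constant" export
# profile; 120 is an example value for the {h2export} wildcard.
h2export_twh = 120
export_h2 = h2export_twh * 1e6       # 120 TWh -> 120,000,000 MWh
constant_profile = export_h2 / 8760  # ~13,699 MWh exported every hour
print(round(constant_profile))       # 13699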
40 changes: 40 additions & 0 deletions scripts/prepare_ports.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
import logging
import os
import shutil
from pathlib import Path

import country_converter as coco
@@ -31,12 +32,43 @@ def download_ports():
return wpi_csv


def filter_ports(dataframe):
    """
    Filter ports by harbor size, keeping for each country only the ports of the
    largest harbor-size class available there (Large, else Medium, else Small).
    """
    # Filter large sized ports
    large_ports = dataframe[dataframe["Harbor Size"] == "Large"]
    countries_with_large_ports = large_ports["country"].unique()

    # Filter out countries that already have large ports
    remaining_ports = dataframe[~dataframe["country"].isin(countries_with_large_ports)]

    # Filter medium sized ports from remaining ports
    medium_ports = remaining_ports[remaining_ports["Harbor Size"] == "Medium"]
    countries_with_medium_ports = medium_ports["country"].unique()

    # Filter out countries that already have medium ports
    remaining_ports = remaining_ports[
        ~remaining_ports["country"].isin(countries_with_medium_ports)
    ]

    # Filter small sized ports from remaining ports
    small_ports = remaining_ports[remaining_ports["Harbor Size"] == "Small"]

    # Combine all filtered ports
    filtered_ports = pd.concat([large_ports, medium_ports, small_ports])

    return filtered_ports


if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake

snakemake = mock_snakemake("prepare_ports")

config = snakemake.config
# configure_logging(snakemake)

# run = snakemake.config.get("run", {})
@@ -102,3 +134,11 @@ def download_ports():
ports["fraction"] = ports["Harbor_size_nr"] / ports["Total_Harbor_size_nr"]

ports.to_csv(snakemake.output[0], sep=",", encoding="utf-8", header="true")

    if snakemake.params.custom_export:
        custom_export_path = Path("data", "custom", "export_ports.csv")
        shutil.move(custom_export_path, snakemake.output[1])
    else:
        filter_ports(ports).to_csv(
            snakemake.output[1], sep=",", encoding="utf-8", header="true"
        )
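To make the selection rule in filter_ports concrete: for each country, every port of the largest harbor-size class present in that country is kept (Large if any, otherwise Medium, otherwise Small), so a single country can contribute several ports. Below is a small usage sketch with toy data, assuming filter_ports can be imported from scripts/prepare_ports.py; the port names and country codes are made up.

# Toy check of filter_ports(): "AA" has a Large port, so its Medium port is
# dropped; "BB" has only Medium ports, which are kept; "CC" keeps its Small port.
import pandas as pd

from prepare_ports import filter_ports  # assumes scripts/ is on the Python path

ports = pd.DataFrame(
    {
        "name": ["P1", "P2", "P3", "P4", "P5"],
        "country": ["AA", "AA", "BB", "BB", "CC"],
        "Harbor Size": ["Large", "Medium", "Medium", "Medium", "Small"],
    }
)

print(filter_ports(ports)["name"].tolist())  # ['P1', 'P3', 'P4', 'P5']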
1 change: 1 addition & 0 deletions test/config.test_myopic.yaml
@@ -91,6 +91,7 @@ custom_data:
add_existing: false
custom_sectors: false
gas_network: false  # If "True", a custom .csv file must be placed in "resources/custom_data/pipelines.csv". If "False", the user can choose between "greenfield" or model built-in datasets. Please refer to ["sector"] below.
export_ports: false  # If "True" then a custom .csv file must be placed in "data/custom/export_ports.csv"


costs: # Costs used in PyPSA-Earth-Sec. Year depends on the wildcard planning_horizon in the scenario section